{
    "document": {
        "category": "csaf_base",
        "csaf_version": "2.0",
        "distribution": {
            "tlp": {
                "label": "WHITE"
            }
        },
        "lang": "en",
        "notes": [
            {
                "category": "legal_disclaimer",
                "text": "The Netherlands Cyber Security Center (henceforth: NCSC-NL) maintains this portal to enhance access to its information and vulnerabilities. The use of this information is subject to the following terms and conditions:\n\nThe vulnerabilities disclosed in this portal are gathered by NCSC-NL from a variety of open sources, which the user can retrieve from other platforms. NCSC-NL makes every reasonable effort to ensure that the content of this portal is kept up to date, and that it is accurate and complete. Nevertheless, NCSC-NL cannot entirely rule out the possibility of errors, and therefore cannot give any warranty in respect of its completeness, accuracy or real-time keeping up-to-date. NCSC-NL does not control nor guarantee the accuracy, relevance, timeliness or completeness of information obtained from these external sources. The vulnerabilities disclosed in this portal are intended solely for the convenience of professional parties to take appropriate measures to manage the risks posed to the cybersecurity. No rights can be derived from the information provided therein.\n\nNCSC-NL and the Kingdom of the Netherlands assume no legal liability or responsibility for any damage resulting from either the use or inability of use of the vulnerabilities disclosed in this portal. This includes damage resulting from the inaccuracy or incompleteness of the information contained in it.\nThe information on this page is subject to Dutch law. All disputes related to or arising from the use of this portal regarding the disclosure of vulnerabilities will be submitted to the competent court in The Hague. This choice of means also applies to the court in summary proceedings."
            }
        ],
        "publisher": {
            "category": "coordinator",
            "contact_details": "cert@ncsc.nl",
            "name": "National Cyber Security Centre",
            "namespace": "https://www.ncsc.nl/"
        },
        "title": "CVE-2026-32887",
        "tracking": {
            "current_release_date": "2026-03-26T00:21:26.793358Z",
            "generator": {
                "date": "2026-02-17T15:00:00Z",
                "engine": {
                    "name": "V.E.L.M.A",
                    "version": "1.7"
                }
            },
            "id": "CVE-2026-32887",
            "initial_release_date": "2026-03-20T20:40:26.871810Z",
            "revision_history": [
                {
                    "date": "2026-03-20T20:40:26.871810Z",
                    "number": "1",
                    "summary": "CVE created.| Source created.| CVE status created. (valid)| Description created for source.| CVSS created.| References created (2).| CWES updated (1)."
                },
                {
                    "date": "2026-03-20T20:40:29.604714Z",
                    "number": "2",
                    "summary": "NCSC Score created."
                },
                {
                    "date": "2026-03-20T22:25:13.173695Z",
                    "number": "3",
                    "summary": "Source created.| CVE status created. (valid)| Description created for source.| CVSS created.| References created (1).| CWES updated (1)."
                },
                {
                    "date": "2026-03-20T22:25:15.566283Z",
                    "number": "4",
                    "summary": "NCSC Score updated."
                },
                {
                    "date": "2026-03-20T22:41:13.991645Z",
                    "number": "5",
                    "summary": "Source created.| CVE status created. (valid)| Description created for source.| CVSS created.| Products created (1).| References created (1).| CWES updated (1)."
                },
                {
                    "date": "2026-03-20T22:41:16.955386Z",
                    "number": "6",
                    "summary": "NCSC Score updated."
                },
                {
                    "date": "2026-03-21T15:23:42.522123Z",
                    "number": "7",
                    "summary": "Source connected.| CVE status created. (valid)| EPSS created."
                },
                {
                    "date": "2026-03-25T14:39:15.509397Z",
                    "number": "8",
                    "summary": "Unknown change."
                },
                {
                    "date": "2026-03-25T18:42:28.417895Z",
                    "number": "9",
                    "summary": "References created (1)."
                },
                {
                    "date": "2026-03-26T00:21:23.049540Z",
                    "number": "10",
                    "summary": "Source created.| CVE status created. (valid)| Description created for source.| CVSS created.| Products created (1).| References created (2).| CWES updated (1)."
                }
            ],
            "status": "interim",
            "version": "10"
        }
    },
    "product_tree": {
        "branches": [
            {
                "branches": [
                    {
                        "branches": [
                            {
                                "category": "product_version_range",
                                "name": "vers:unknown/<3.20.0",
                                "product": {
                                    "name": "vers:unknown/<3.20.0",
                                    "product_id": "CSAFPID-5877667"
                                }
                            },
                            {
                                "category": "product_version_range",
                                "name": "vers:unknown/>=0|<3.20.0",
                                "product": {
                                    "name": "vers:unknown/>=0|<3.20.0",
                                    "product_id": "CSAFPID-5912502"
                                }
                            }
                        ],
                        "category": "product_name",
                        "name": "effect"
                    }
                ],
                "category": "vendor",
                "name": "Effect-TS"
            }
        ]
    },
    "vulnerabilities": [
        {
            "cve": "CVE-2026-32887",
            "cwe": {
                "id": "CWE-362",
                "name": "Concurrent Execution using Shared Resource with Improper Synchronization ('Race Condition')"
            },
            "notes": [
                {
                    "category": "description",
                    "text": "## Versions\n\n- `effect`: 3.19.15\n- `@effect/rpc`: 0.72.1\n- `@effect/platform`: 0.94.2\n- Node.js: v22.20.0\n- Vercel runtime with Fluid compute\n- Next.js: 16 (App Router)\n- `@clerk/nextjs`: 6.x\n\n## Root cause\n\nEffect's `MixedScheduler` batches fiber continuations and drains them inside a **single** microtask or timer callback. The `AsyncLocalStorage` context active during that callback belongs to whichever request first triggered the scheduler's drain cycle — **not** the request that owns the fiber being resumed.\n\n### Detailed mechanism\n\n#### 1. Scheduler batching (`effect/src/Scheduler.ts`, `MixedScheduler`)\n\n```typescript\n// MixedScheduler.starve() — called once when first task is scheduled\nprivate starve(depth = 0) {\n  if (depth >= this.maxNextTickBeforeTimer) {\n    setTimeout(() => this.starveInternal(0), 0)       // timer queue\n  } else {\n    Promise.resolve(void 0).then(() => this.starveInternal(depth + 1)) // microtask queue\n  }\n}\n\n// MixedScheduler.starveInternal() — drains ALL accumulated tasks in one call\nprivate starveInternal(depth: number) {\n  const tasks = this.tasks.buckets\n  this.tasks.buckets = []\n  for (const [_, toRun] of tasks) {\n    for (let i = 0; i < toRun.length; i++) {\n      toRun[i]()  // ← Every fiber continuation runs in the SAME ALS context\n    }\n  }\n  // ...\n}\n```\n\n`scheduleTask` only calls `starve()` when `running` is `false`. Subsequent tasks accumulate in `this.tasks` until `starveInternal` drains them all. The `Promise.then()` (or `setTimeout`) callback inherits the ALS context from whichever call site created it — i.e., whichever request's fiber first set `running = true`.\n\n**Result:** Under concurrent load, fiber continuations from Request A and Request B execute inside the same `starveInternal` call, sharing a single ALS context. If Request A triggered `starve()`, then Request B's fiber reads Request A's ALS context.\n\n#### 2. `toWebHandlerRuntime` does not propagate ALS (`@effect/platform/src/HttpApp.ts:211-240`)\n\n```typescript\nexport const toWebHandlerRuntime = <R>(runtime: Runtime.Runtime<R>) => {\n  const httpRuntime: Types.Mutable<Runtime.Runtime<R>> = Runtime.make(runtime)\n  const run = Runtime.runFork(httpRuntime)\n  return <E>(self: Default<E, R | Scope.Scope>, middleware?) => {\n    return (request: Request, context?): Promise<Response> =>\n      new Promise((resolve) => {\n        // Per-request Effect context is correctly set via contextMap:\n        const contextMap = new Map<string, any>(runtime.context.unsafeMap)\n        const httpServerRequest = ServerRequest.fromWeb(request)\n        contextMap.set(ServerRequest.HttpServerRequest.key, httpServerRequest)\n        httpRuntime.context = Context.unsafeMake(contextMap)\n\n        // But the fiber is forked without any ALS propagation:\n        const fiber = run(httpApp as any)  // ← ALS context is NOT captured or restored\n      })\n  }\n}\n```\n\nEffect's own `Context` (containing `HttpServerRequest`) is correctly set per-request. But the **Node.js ALS context** — which frameworks like Next.js, Clerk, and OpenTelemetry rely on — is not captured at fork time or restored when the fiber's continuations execute.\n\n#### 3. The dangerous pattern this enables\n\n```typescript\n// RPC handler — runs inside an Effect fiber\nconst handler = Effect.gen(function*() {\n  // This calls auth() from @clerk/nextjs/server, which reads from ALS\n  const { userId } = yield* Effect.tryPromise({\n    try: async () => auth(),  // ← may read WRONG user's session\n    catch: () => new UnauthorizedError({ message: \"Auth failed\" })\n  })\n  return yield* repository.getUser(userId)\n})\n```\n\nThe `async () => auth()` thunk executes when the fiber continuation is scheduled by `MixedScheduler`. At that point, the ALS context belongs to an arbitrary concurrent request.\n\n## Reproduction scenario\n\n```\nTimeline (two concurrent requests to the same toWebHandler endpoint):\n\nT0: Request A arrives → POST handler → webHandler(requestA)\n    → Promise executor runs synchronously\n    → httpRuntime.context set to A's context\n    → fiber A forked, runs first ops synchronously\n    → fiber A yields (e.g., at Effect.tryPromise boundary)\n    → scheduler.scheduleTask(fiberA_continuation)\n    → running=false → starve() called → Promise.resolve().then(drain)\n       ↑ ALS context captured = Request A's context\n\nT1: Request B arrives → POST handler → webHandler(requestB)\n    → Promise executor runs synchronously\n    → httpRuntime.context set to B's context\n    → fiber B forked, runs first ops synchronously\n    → fiber B yields\n    → scheduler.scheduleTask(fiberB_continuation)\n    → running=true → task queued, no new starve()\n\nT2: Microtask fires → starveInternal() runs\n    → Drains fiberA_continuation → auth() reads ALS → gets A's context ✓\n    → Drains fiberB_continuation → auth() reads ALS → gets A's context ✗ ← WRONG USER\n```\n\n## Minimal reproduction\n\n```typescript\nimport { AsyncLocalStorage } from \"node:async_hooks\"\nimport { Effect, Layer } from \"effect\"\nimport { RpcServer, RpcSerialization, Rpc, RpcGroup } from \"@effect/rpc\"\nimport { HttpServer } from \"@effect/platform\"\nimport * as S from \"effect/Schema\"\n\n// Simulate a framework's ALS (like Next.js / Clerk)\nconst requestStore = new AsyncLocalStorage<{ userId: string }>()\n\nclass GetUser extends Rpc.make(\"GetUser\", {\n  success: S.Struct({ userId: S.String, alsUserId: S.String }),\n  failure: S.Never,\n  payload: {}\n}) {}\n\nconst MyRpc = RpcGroup.make(\"MyRpc\").add(GetUser)\n\nconst MyRpcLive = MyRpc.toLayer(\n  RpcGroup.toHandlers(MyRpc, {\n    GetUser: () =>\n      Effect.gen(function*() {\n        // Simulate calling an ALS-dependent API inside an Effect fiber\n        const alsResult = yield* Effect.tryPromise({\n          try: async () => {\n            const store = requestStore.getStore()\n            return store?.userId ?? \"NONE\"\n          },\n          catch: () => { throw new Error(\"impossible\") }\n        })\n        return { userId: \"from-effect-context\", alsUserId: alsResult }\n      })\n  })\n)\n\nconst RpcLayer = MyRpcLive.pipe(\n  Layer.provideMerge(RpcSerialization.layerJson),\n  Layer.provideMerge(HttpServer.layerContext)\n)\n\nconst { handler } = RpcServer.toWebHandler(MyRpc, { layer: RpcLayer })\n\n// Simulate two concurrent requests with different ALS contexts\nasync function main() {\n  const results = await Promise.all([\n    requestStore.run({ userId: \"user-A\" }, () => handler(makeRpcRequest(\"GetUser\"))),\n    requestStore.run({ userId: \"user-B\" }, () => handler(makeRpcRequest(\"GetUser\"))),\n  ])\n\n  // Parse responses and check if alsUserId matches the expected user\n  // Under the bug: both responses may show \"user-A\" (or one shows the other's)\n  for (const res of results) {\n    console.log(await res.json())\n  }\n}\n```\n\n## Impact\n\n| Symptom | Severity |\n|---------|----------|\n| `auth()` returns wrong user's session | **Critical** — authentication bypass |\n| `cookies()` / `headers()` from Next.js read wrong request | **High** — data leakage |\n| OpenTelemetry trace context crosses requests | **Medium** — incorrect traces |\n| Works locally, fails in production | Hard to diagnose — only manifests under concurrent load |\n\n## Workaround\n\nCapture ALS-dependent values **before** entering the Effect runtime and pass them via Effect's own context system:\n\n```typescript\n// In the route handler — OUTSIDE the Effect fiber (ALS is correct here)\nexport const POST = async (request: Request) => {\n  const { userId } = await auth()  // ← Safe: still in Next.js ALS context\n\n  // Inject into request headers or use the `context` parameter\n  const headers = new Headers(request.headers)\n  headers.set(\"x-clerk-auth-user-id\", userId ?? \"\")\n  const enrichedRequest = new Request(request.url, {\n    method: request.method,\n    headers,\n    body: request.body,\n    duplex: \"half\" as any,\n  })\n\n  return webHandler(enrichedRequest)\n}\n\n// In Effect handlers — read from HttpServerRequest headers instead of calling auth()\nconst getAuthenticatedUserId = Effect.gen(function*() {\n  const req = yield* HttpServerRequest.HttpServerRequest\n  const userId = req.headers[\"x-clerk-auth-user-id\"]\n  if (!userId) return yield* Effect.fail(new UnauthorizedError({ message: \"Auth required\" }))\n  return userId\n})\n```\n\n## Suggested fix (for Effect maintainers)\n\n### Option A: Propagate ALS context through the scheduler\n\nCapture the `AsyncLocalStorage` snapshot when a fiber continuation is scheduled, and restore it when the continuation executes:\n\n```typescript\n// In MixedScheduler or the fiber runtime\nimport { AsyncLocalStorage } from \"node:async_hooks\"\n\nscheduleTask(task: Task, priority: number) {\n  // Capture current ALS context\n  const snapshot = AsyncLocalStorage.snapshot()\n  this.tasks.scheduleTask(() => snapshot(task), priority)\n  // ...\n}\n```\n\n`AsyncLocalStorage.snapshot()` (Node.js 20.5+) returns a function that, when called, restores the ALS context from the point of capture. This ensures each fiber continuation runs with its originating request's ALS context.\n\n**Trade-off:** Adds one closure allocation per scheduled task. Could be opt-in via a `FiberRef` or scheduler option.\n\n### Option B: Capture ALS at `runFork` and restore per fiber step\n\nWhen `Runtime.runFork` is called, capture the ALS snapshot and associate it with the fiber. Before each fiber step (in the fiber runtime's `evaluateEffect` loop), restore the snapshot.\n\n**Trade-off:** More invasive but provides correct ALS propagation for the fiber's entire lifetime, including across `flatMap` chains and `Effect.tryPromise` thunks.\n\n### Option C: Document the limitation and provide a `context` injection API\n\nIf ALS propagation is intentionally not supported, document this prominently and provide a first-class API for `toWebHandler` to accept per-request context. The existing `context?: Context.Context<never>` parameter on the handler function partially addresses this, but it requires callers to know about the issue and manually extract values before entering Effect.\n\n## Related\n\n- Node.js `AsyncLocalStorage` docs: https://nodejs.org/api/async_context.html\n- `AsyncLocalStorage.snapshot()`: https://nodejs.org/api/async_context.html#static-method-asynclocalstoragesnapshot\n- Next.js uses ALS for `cookies()`, `headers()`, `auth()` in App Router\n- Similar issue pattern in other fiber-based runtimes (e.g., ZIO has `FiberRef` propagation for this)\n\n\n## POC replica of my setup\n\n```\n// Create web handler from Effect RPC\n// sharedMemoMap ensures all RPC routes share the same connection pool\nconst { handler: webHandler, dispose } = RpcServer.toWebHandler(DemoRpc, {\n  layer: RpcLayer,\n  memoMap: sharedMemoMap,\n});\n\n/**\n * POST /api/rpc/demo\n */\nexport const POST = async (request: Request) => {\n  return webHandler(request);\n};\n\nregisterDispose(dispose);\n```\n\n### Used util functions\n\n```\n\n/**\n * Creates a dispose registry that collects dispose callbacks and runs them\n * when `runAll` is invoked. Handles both sync and async dispose functions,\n * catching errors to prevent one failing dispose from breaking others.\n *\n * @internal Exported for testing — use `registerDispose` in application code.\n */\nexport const makeDisposeRegistry = () => {\n  const disposeFns: Array<() => void | Promise<void>> = []\n\n  const runAll = () => {\n    for (const fn of disposeFns) {\n      try {\n        const result = fn()\n        if (result && typeof result.then === \"function\") {\n          result.then(undefined, (err: unknown) => console.error(\"Dispose error:\", err))\n        }\n      } catch (err) {\n        console.error(\"Dispose error:\", err)\n      }\n    }\n  }\n\n  const register = (dispose: () => void | Promise<void>) => {\n    disposeFns.push(dispose)\n  }\n\n  return { register, runAll }\n}\n\nexport const registerDispose: (dispose: () => void | Promise<void>) => void = globalValue(\n  Symbol.for(\"@global/RegisterDispose\"),\n  () => {\n    const registry = makeDisposeRegistry()\n\n    if (typeof process !== \"undefined\") {\n      process.once(\"beforeExit\", registry.runAll)\n    }\n\n    return registry.register\n  }\n)\n```\n\n### The actual effect that was run within the RPC context that the bug was found\n\n```\nexport const getAuthenticatedUserId: Effect.Effect<string, UnauthorizedError> =\n  Effect.gen(function*() {\n    const authResult = yield* Effect.tryPromise({\n      try: async () => auth(),\n      catch: () =>\n        new UnauthorizedError({\n          message: \"Failed to get auth session\"\n        })\n    })\n\n    if (!authResult.userId) {\n      return yield* Effect.fail(\n        new UnauthorizedError({\n          message: \"Authentication required\"\n        })\n      )\n    }\n\n    return authResult.userId\n  })\n ```",
                    "title": "github - https://api.github.com/advisories/GHSA-38f7-945m-qr2g"
                },
                {
                    "category": "description",
                    "text": "Effect is a TypeScript framework that consists of several packages that work together to help build TypeScript applications. Prior to version 3.20.0, when using `RpcServer.toWebHandler` (or `HttpApp.toWebHandlerRuntime`) inside a Next.js App Router route handler, any Node.js `AsyncLocalStorage`-dependent API called from within an Effect fiber can read another concurrent request's context — or no context at all. Under production traffic, `auth()` from `@clerk/nextjs/server` returns a different user's session. Version 3.20.0 contains a fix for the issue.",
                    "title": "nvd - https://services.nvd.nist.gov/rest/json/cves/2.0?cveId=CVE-2026-32887"
                },
                {
                    "category": "description",
                    "text": "Effect is a TypeScript framework that consists of several packages that work together to help build TypeScript applications. Prior to version 3.20.0, when using `RpcServer.toWebHandler` (or `HttpApp.toWebHandlerRuntime`) inside a Next.js App Router route handler, any Node.js `AsyncLocalStorage`-dependent API called from within an Effect fiber can read another concurrent request's context — or no context at all. Under production traffic, `auth()` from `@clerk/nextjs/server` returns a different user's session. Version 3.20.0 contains a fix for the issue.",
                    "title": "cveprojectv5 - https://raw.githubusercontent.com/CVEProject/cvelistV5/main/cves/2026/32xxx/CVE-2026-32887.json"
                },
                {
                    "category": "description",
                    "text": "## Versions\n\n- `effect`: 3.19.15\n- `@effect/rpc`: 0.72.1\n- `@effect/platform`: 0.94.2\n- Node.js: v22.20.0\n- Vercel runtime with Fluid compute\n- Next.js: 16 (App Router)\n- `@clerk/nextjs`: 6.x\n\n## Root cause\n\nEffect's `MixedScheduler` batches fiber continuations and drains them inside a **single** microtask or timer callback. The `AsyncLocalStorage` context active during that callback belongs to whichever request first triggered the scheduler's drain cycle — **not** the request that owns the fiber being resumed.\n\n### Detailed mechanism\n\n#### 1. Scheduler batching (`effect/src/Scheduler.ts`, `MixedScheduler`)\n\n```typescript\n// MixedScheduler.starve() — called once when first task is scheduled\nprivate starve(depth = 0) {\n  if (depth >= this.maxNextTickBeforeTimer) {\n    setTimeout(() => this.starveInternal(0), 0)       // timer queue\n  } else {\n    Promise.resolve(void 0).then(() => this.starveInternal(depth + 1)) // microtask queue\n  }\n}\n\n// MixedScheduler.starveInternal() — drains ALL accumulated tasks in one call\nprivate starveInternal(depth: number) {\n  const tasks = this.tasks.buckets\n  this.tasks.buckets = []\n  for (const [_, toRun] of tasks) {\n    for (let i = 0; i < toRun.length; i++) {\n      toRun[i]()  // ← Every fiber continuation runs in the SAME ALS context\n    }\n  }\n  // ...\n}\n```\n\n`scheduleTask` only calls `starve()` when `running` is `false`. Subsequent tasks accumulate in `this.tasks` until `starveInternal` drains them all. The `Promise.then()` (or `setTimeout`) callback inherits the ALS context from whichever call site created it — i.e., whichever request's fiber first set `running = true`.\n\n**Result:** Under concurrent load, fiber continuations from Request A and Request B execute inside the same `starveInternal` call, sharing a single ALS context. If Request A triggered `starve()`, then Request B's fiber reads Request A's ALS context.\n\n#### 2. `toWebHandlerRuntime` does not propagate ALS (`@effect/platform/src/HttpApp.ts:211-240`)\n\n```typescript\nexport const toWebHandlerRuntime = <R>(runtime: Runtime.Runtime<R>) => {\n  const httpRuntime: Types.Mutable<Runtime.Runtime<R>> = Runtime.make(runtime)\n  const run = Runtime.runFork(httpRuntime)\n  return <E>(self: Default<E, R | Scope.Scope>, middleware?) => {\n    return (request: Request, context?): Promise<Response> =>\n      new Promise((resolve) => {\n        // Per-request Effect context is correctly set via contextMap:\n        const contextMap = new Map<string, any>(runtime.context.unsafeMap)\n        const httpServerRequest = ServerRequest.fromWeb(request)\n        contextMap.set(ServerRequest.HttpServerRequest.key, httpServerRequest)\n        httpRuntime.context = Context.unsafeMake(contextMap)\n\n        // But the fiber is forked without any ALS propagation:\n        const fiber = run(httpApp as any)  // ← ALS context is NOT captured or restored\n      })\n  }\n}\n```\n\nEffect's own `Context` (containing `HttpServerRequest`) is correctly set per-request. But the **Node.js ALS context** — which frameworks like Next.js, Clerk, and OpenTelemetry rely on — is not captured at fork time or restored when the fiber's continuations execute.\n\n#### 3. The dangerous pattern this enables\n\n```typescript\n// RPC handler — runs inside an Effect fiber\nconst handler = Effect.gen(function*() {\n  // This calls auth() from @clerk/nextjs/server, which reads from ALS\n  const { userId } = yield* Effect.tryPromise({\n    try: async () => auth(),  // ← may read WRONG user's session\n    catch: () => new UnauthorizedError({ message: \"Auth failed\" })\n  })\n  return yield* repository.getUser(userId)\n})\n```\n\nThe `async () => auth()` thunk executes when the fiber continuation is scheduled by `MixedScheduler`. At that point, the ALS context belongs to an arbitrary concurrent request.\n\n## Reproduction scenario\n\n```\nTimeline (two concurrent requests to the same toWebHandler endpoint):\n\nT0: Request A arrives → POST handler → webHandler(requestA)\n    → Promise executor runs synchronously\n    → httpRuntime.context set to A's context\n    → fiber A forked, runs first ops synchronously\n    → fiber A yields (e.g., at Effect.tryPromise boundary)\n    → scheduler.scheduleTask(fiberA_continuation)\n    → running=false → starve() called → Promise.resolve().then(drain)\n       ↑ ALS context captured = Request A's context\n\nT1: Request B arrives → POST handler → webHandler(requestB)\n    → Promise executor runs synchronously\n    → httpRuntime.context set to B's context\n    → fiber B forked, runs first ops synchronously\n    → fiber B yields\n    → scheduler.scheduleTask(fiberB_continuation)\n    → running=true → task queued, no new starve()\n\nT2: Microtask fires → starveInternal() runs\n    → Drains fiberA_continuation → auth() reads ALS → gets A's context ✓\n    → Drains fiberB_continuation → auth() reads ALS → gets A's context ✗ ← WRONG USER\n```\n\n## Minimal reproduction\n\n```typescript\nimport { AsyncLocalStorage } from \"node:async_hooks\"\nimport { Effect, Layer } from \"effect\"\nimport { RpcServer, RpcSerialization, Rpc, RpcGroup } from \"@effect/rpc\"\nimport { HttpServer } from \"@effect/platform\"\nimport * as S from \"effect/Schema\"\n\n// Simulate a framework's ALS (like Next.js / Clerk)\nconst requestStore = new AsyncLocalStorage<{ userId: string }>()\n\nclass GetUser extends Rpc.make(\"GetUser\", {\n  success: S.Struct({ userId: S.String, alsUserId: S.String }),\n  failure: S.Never,\n  payload: {}\n}) {}\n\nconst MyRpc = RpcGroup.make(\"MyRpc\").add(GetUser)\n\nconst MyRpcLive = MyRpc.toLayer(\n  RpcGroup.toHandlers(MyRpc, {\n    GetUser: () =>\n      Effect.gen(function*() {\n        // Simulate calling an ALS-dependent API inside an Effect fiber\n        const alsResult = yield* Effect.tryPromise({\n          try: async () => {\n            const store = requestStore.getStore()\n            return store?.userId ?? \"NONE\"\n          },\n          catch: () => { throw new Error(\"impossible\") }\n        })\n        return { userId: \"from-effect-context\", alsUserId: alsResult }\n      })\n  })\n)\n\nconst RpcLayer = MyRpcLive.pipe(\n  Layer.provideMerge(RpcSerialization.layerJson),\n  Layer.provideMerge(HttpServer.layerContext)\n)\n\nconst { handler } = RpcServer.toWebHandler(MyRpc, { layer: RpcLayer })\n\n// Simulate two concurrent requests with different ALS contexts\nasync function main() {\n  const results = await Promise.all([\n    requestStore.run({ userId: \"user-A\" }, () => handler(makeRpcRequest(\"GetUser\"))),\n    requestStore.run({ userId: \"user-B\" }, () => handler(makeRpcRequest(\"GetUser\"))),\n  ])\n\n  // Parse responses and check if alsUserId matches the expected user\n  // Under the bug: both responses may show \"user-A\" (or one shows the other's)\n  for (const res of results) {\n    console.log(await res.json())\n  }\n}\n```\n\n## Impact\n\n| Symptom | Severity |\n|---------|----------|\n| `auth()` returns wrong user's session | **Critical** — authentication bypass |\n| `cookies()` / `headers()` from Next.js read wrong request | **High** — data leakage |\n| OpenTelemetry trace context crosses requests | **Medium** — incorrect traces |\n| Works locally, fails in production | Hard to diagnose — only manifests under concurrent load |\n\n## Workaround\n\nCapture ALS-dependent values **before** entering the Effect runtime and pass them via Effect's own context system:\n\n```typescript\n// In the route handler — OUTSIDE the Effect fiber (ALS is correct here)\nexport const POST = async (request: Request) => {\n  const { userId } = await auth()  // ← Safe: still in Next.js ALS context\n\n  // Inject into request headers or use the `context` parameter\n  const headers = new Headers(request.headers)\n  headers.set(\"x-clerk-auth-user-id\", userId ?? \"\")\n  const enrichedRequest = new Request(request.url, {\n    method: request.method,\n    headers,\n    body: request.body,\n    duplex: \"half\" as any,\n  })\n\n  return webHandler(enrichedRequest)\n}\n\n// In Effect handlers — read from HttpServerRequest headers instead of calling auth()\nconst getAuthenticatedUserId = Effect.gen(function*() {\n  const req = yield* HttpServerRequest.HttpServerRequest\n  const userId = req.headers[\"x-clerk-auth-user-id\"]\n  if (!userId) return yield* Effect.fail(new UnauthorizedError({ message: \"Auth required\" }))\n  return userId\n})\n```\n\n## Suggested fix (for Effect maintainers)\n\n### Option A: Propagate ALS context through the scheduler\n\nCapture the `AsyncLocalStorage` snapshot when a fiber continuation is scheduled, and restore it when the continuation executes:\n\n```typescript\n// In MixedScheduler or the fiber runtime\nimport { AsyncLocalStorage } from \"node:async_hooks\"\n\nscheduleTask(task: Task, priority: number) {\n  // Capture current ALS context\n  const snapshot = AsyncLocalStorage.snapshot()\n  this.tasks.scheduleTask(() => snapshot(task), priority)\n  // ...\n}\n```\n\n`AsyncLocalStorage.snapshot()` (Node.js 20.5+) returns a function that, when called, restores the ALS context from the point of capture. This ensures each fiber continuation runs with its originating request's ALS context.\n\n**Trade-off:** Adds one closure allocation per scheduled task. Could be opt-in via a `FiberRef` or scheduler option.\n\n### Option B: Capture ALS at `runFork` and restore per fiber step\n\nWhen `Runtime.runFork` is called, capture the ALS snapshot and associate it with the fiber. Before each fiber step (in the fiber runtime's `evaluateEffect` loop), restore the snapshot.\n\n**Trade-off:** More invasive but provides correct ALS propagation for the fiber's entire lifetime, including across `flatMap` chains and `Effect.tryPromise` thunks.\n\n### Option C: Document the limitation and provide a `context` injection API\n\nIf ALS propagation is intentionally not supported, document this prominently and provide a first-class API for `toWebHandler` to accept per-request context. The existing `context?: Context.Context<never>` parameter on the handler function partially addresses this, but it requires callers to know about the issue and manually extract values before entering Effect.\n\n## Related\n\n- Node.js `AsyncLocalStorage` docs: https://nodejs.org/api/async_context.html\n- `AsyncLocalStorage.snapshot()`: https://nodejs.org/api/async_context.html#static-method-asynclocalstoragesnapshot\n- Next.js uses ALS for `cookies()`, `headers()`, `auth()` in App Router\n- Similar issue pattern in other fiber-based runtimes (e.g., ZIO has `FiberRef` propagation for this)\n\n\n## POC replica of my setup\n\n```\n// Create web handler from Effect RPC\n// sharedMemoMap ensures all RPC routes share the same connection pool\nconst { handler: webHandler, dispose } = RpcServer.toWebHandler(DemoRpc, {\n  layer: RpcLayer,\n  memoMap: sharedMemoMap,\n});\n\n/**\n * POST /api/rpc/demo\n */\nexport const POST = async (request: Request) => {\n  return webHandler(request);\n};\n\nregisterDispose(dispose);\n```\n\n### Used util functions\n\n```\n\n/**\n * Creates a dispose registry that collects dispose callbacks and runs them\n * when `runAll` is invoked. Handles both sync and async dispose functions,\n * catching errors to prevent one failing dispose from breaking others.\n *\n * @internal Exported for testing — use `registerDispose` in application code.\n */\nexport const makeDisposeRegistry = () => {\n  const disposeFns: Array<() => void | Promise<void>> = []\n\n  const runAll = () => {\n    for (const fn of disposeFns) {\n      try {\n        const result = fn()\n        if (result && typeof result.then === \"function\") {\n          result.then(undefined, (err: unknown) => console.error(\"Dispose error:\", err))\n        }\n      } catch (err) {\n        console.error(\"Dispose error:\", err)\n      }\n    }\n  }\n\n  const register = (dispose: () => void | Promise<void>) => {\n    disposeFns.push(dispose)\n  }\n\n  return { register, runAll }\n}\n\nexport const registerDispose: (dispose: () => void | Promise<void>) => void = globalValue(\n  Symbol.for(\"@global/RegisterDispose\"),\n  () => {\n    const registry = makeDisposeRegistry()\n\n    if (typeof process !== \"undefined\") {\n      process.once(\"beforeExit\", registry.runAll)\n    }\n\n    return registry.register\n  }\n)\n```\n\n### The actual effect that was run within the RPC context that the bug was found\n\n```\nexport const getAuthenticatedUserId: Effect.Effect<string, UnauthorizedError> =\n  Effect.gen(function*() {\n    const authResult = yield* Effect.tryPromise({\n      try: async () => auth(),\n      catch: () =>\n        new UnauthorizedError({\n          message: \"Failed to get auth session\"\n        })\n    })\n\n    if (!authResult.userId) {\n      return yield* Effect.fail(\n        new UnauthorizedError({\n          message: \"Authentication required\"\n        })\n      )\n    }\n\n    return authResult.userId\n  })\n ```",
                    "title": "osv - https://www.googleapis.com/download/storage/v1/b/osv-vulnerabilities/o/npm%2FGHSA-38f7-945m-qr2g.json?alt=media"
                },
                {
                    "category": "other",
                    "text": "9e-05",
                    "title": "EPSS"
                },
                {
                    "category": "other",
                    "text": "4.0",
                    "title": "NCSC Score"
                },
                {
                    "category": "other",
                    "text": "There is cwe data available from source Nvd",
                    "title": "NCSC Score top decreasing factors"
                }
            ],
            "product_status": {
                "known_affected": [
                    "CSAFPID-5877667",
                    "CSAFPID-5912502"
                ]
            },
            "references": [
                {
                    "category": "external",
                    "summary": "Source - github",
                    "url": "https://api.github.com/advisories/GHSA-38f7-945m-qr2g"
                },
                {
                    "category": "external",
                    "summary": "Source - nvd",
                    "url": "https://services.nvd.nist.gov/rest/json/cves/2.0?cveId=CVE-2026-32887"
                },
                {
                    "category": "external",
                    "summary": "Source - cveprojectv5",
                    "url": "https://raw.githubusercontent.com/CVEProject/cvelistV5/main/cves/2026/32xxx/CVE-2026-32887.json"
                },
                {
                    "category": "external",
                    "summary": "Source - first",
                    "url": "https://api.first.org/data/v1/epss?limit=10000&offset=0"
                },
                {
                    "category": "external",
                    "summary": "Source - osv",
                    "url": "https://www.googleapis.com/download/storage/v1/b/osv-vulnerabilities/o/npm%2FGHSA-38f7-945m-qr2g.json?alt=media"
                },
                {
                    "category": "external",
                    "summary": "Reference - cveprojectv5; github; nvd; osv",
                    "url": "https://github.com/Effect-TS/effect/security/advisories/GHSA-38f7-945m-qr2g"
                },
                {
                    "category": "external",
                    "summary": "Reference - github",
                    "url": "https://github.com/advisories/GHSA-38f7-945m-qr2g"
                },
                {
                    "category": "external",
                    "summary": "Reference - github; osv",
                    "url": "https://nvd.nist.gov/vuln/detail/CVE-2026-32887"
                }
            ],
            "scores": [
                {
                    "cvss_v3": {
                        "version": "3.1",
                        "vectorString": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:N",
                        "baseScore": 7.4,
                        "baseSeverity": "HIGH"
                    },
                    "products": [
                        "CSAFPID-5877667",
                        "CSAFPID-5912502"
                    ]
                }
            ],
            "title": "CVE-2026-32887"
        }
    ]
}