Transparent PII anonymization and rehydration for LLM API calls. Available in Node.js and Bun only.
Import
import {
wrapLLMClient,
createRehydraFetch,
createRehydraProxy,
createRehydraProxyServer,
SSEParser,
OpenAIProvider,
AnthropicProvider,
detectProvider,
} from 'rehydra/proxy';
wrapLLMClient()
Wraps an OpenAI or Anthropic SDK client for automatic anonymization/rehydration.
Signature
function wrapLLMClient<T>(
client: T,
config: Omit<RehydraFetchConfig, 'provider'>
): T
Parameters
client — An OpenAI or Anthropic SDK client instance.
config — Configuration object (RehydraFetchConfig with the provider field omitted).
Returns
The same client type T, with fetch intercepted for anonymization.
Example
import OpenAI from 'openai';
import { wrapLLMClient } from 'rehydra/proxy';
import { InMemoryKeyProvider, SQLitePIIStorageProvider } from 'rehydra';
const storage = new SQLitePIIStorageProvider('./pii.db');
await storage.initialize();
const client = wrapLLMClient(new OpenAI(), {
anonymizer: { ner: { mode: 'quantized' } },
keyProvider: new InMemoryKeyProvider(),
piiStorageProvider: storage,
getSessionId: () => 'session-1',
});
createRehydraFetch()
Creates a drop-in fetch replacement that anonymizes requests and rehydrates responses.
Signature
function createRehydraFetch(config: RehydraFetchConfig): typeof globalThis.fetch
Parameters
config (RehydraFetchConfig) — required.
Returns
A fetch-compatible function.
Example
import { createRehydraFetch } from 'rehydra/proxy';
import { InMemoryKeyProvider, SQLitePIIStorageProvider } from 'rehydra';
const storage = new SQLitePIIStorageProvider('./pii.db');
await storage.initialize();
const fetch = createRehydraFetch({
anonymizer: { ner: { mode: 'quantized' } },
keyProvider: new InMemoryKeyProvider(),
piiStorageProvider: storage,
provider: 'openai', // or 'anthropic', or auto-detected
getSessionId: () => 'session-1',
});
const response = await fetch('https://api.openai.com/v1/chat/completions', {
method: 'POST',
body: JSON.stringify({ model: 'gpt-4', messages: [...] }),
});
createRehydraProxy()
Creates an HTTP proxy middleware using the Web Request/Response API.
Signature
function createRehydraProxy(
config: RehydraProxyConfig
): (request: Request) => Promise<Response>
Parameters
config (RehydraProxyConfig) — required.
Returns
A function (request: Request) => Promise<Response> compatible with Bun.serve, Hono, and other frameworks using Web APIs.
Example
import { createRehydraProxy } from 'rehydra/proxy';
import { InMemoryKeyProvider, SQLitePIIStorageProvider } from 'rehydra';
const storage = new SQLitePIIStorageProvider('./pii.db');
await storage.initialize();
const proxy = createRehydraProxy({
upstream: 'https://api.openai.com',
anonymizer: { ner: { mode: 'quantized' } },
keyProvider: new InMemoryKeyProvider(),
piiStorageProvider: storage,
getSessionId: () => 'session-1',
});
Bun.serve({ port: 8080, fetch: proxy });
createRehydraProxyServer()
Creates and starts a standalone HTTP proxy server.
Signature
function createRehydraProxyServer(
config: RehydraProxyServerConfig
): Promise<RehydraProxyServer>
Parameters
config (RehydraProxyServerConfig) — required.
Extends RehydraProxyConfig with the following additional properties:
host (string, default: '127.0.0.1') — Host to bind to.
Returns
interface RehydraProxyServer {
server: http.Server;
port: number;
host: string;
close(): Promise<void>;
}
Example
import { createRehydraProxyServer } from 'rehydra/proxy';
import { InMemoryKeyProvider, SQLitePIIStorageProvider } from 'rehydra';
const storage = new SQLitePIIStorageProvider('./pii.db');
await storage.initialize();
const server = await createRehydraProxyServer({
upstream: 'https://api.openai.com',
port: 8080,
anonymizer: { ner: { mode: 'quantized' } },
keyProvider: new InMemoryKeyProvider(),
piiStorageProvider: storage,
getSessionId: () => 'session-1',
});
// Point clients at http://localhost:8080/v1
await server.close();
Configuration Types
RehydraFetchConfig
interface RehydraFetchConfig {
// Encryption key provider (required)
keyProvider: KeyProvider;
// Storage for session persistence (required)
piiStorageProvider: PIIStorageProvider;
// Anonymizer configuration (includes tagFormat)
anonymizer?: AnonymizerConfig;
// Policy override
policy?: Partial<AnonymizationPolicy>;
// Provider hint (auto-detected if omitted)
provider?: 'openai' | 'anthropic' | 'auto';
// Derive session ID from request
getSessionId?: (request: Request) => string | Promise<string>;
// Handle streaming responses (default: true)
handleStreaming?: boolean;
// Locale hint
locale?: string;
// Tool execution callback (non-streaming only)
// Arguments are rehydrated before calling; return value is anonymized
onToolCall?: (
name: string,
args: Record<string, unknown>,
toolCallId: string,
) => unknown | Promise<unknown>;
// Maximum tool execution rounds (default: 10)
maxToolRounds?: number;
// PII system instruction: string to override, false to disable, undefined for default
systemInstruction?: string | false;
}
RehydraProxyConfig
Extends RehydraFetchConfig with:
interface RehydraProxyConfig extends RehydraFetchConfig {
// Upstream LLM API URL (e.g., 'https://api.openai.com')
upstream: string;
// Headers to forward to upstream
// Default: ['authorization', 'content-type', 'x-api-key',
// 'anthropic-version', 'openai-organization', 'openai-project']
forwardHeaders?: string[];
// Strip this prefix from the request path before forwarding
stripPrefix?: string;
// API key to inject into upstream requests
// Adds Authorization header (OpenAI) or x-api-key header (Anthropic)
apiKey?: string;
}
Providers
detectProvider()
Auto-detect the LLM provider from request URL and headers.
function detectProvider(
url: string,
headers: Headers,
hint?: string
): LLMContentProvider
LLMContentProvider Interface
interface LLMContentProvider {
readonly name: string;
// Request detection
matchesRequest(url: string, headers: Headers): boolean;
isStreamingRequest(body: unknown): boolean;
// Request handling
extractRequestText(body: unknown): string[];
rebuildRequestBody(body: unknown, anonymizedTexts: string[]): unknown;
// Non-streaming response handling
extractResponseText(body: unknown): string[];
rebuildResponseBody(body: unknown, rehydratedTexts: string[]): unknown;
// Streaming content handling
extractSSEDelta(data: unknown): string | null;
rebuildSSEDelta(data: unknown, rehydratedText: string): unknown;
// Tool/function call handling (non-streaming)
extractResponseToolCalls?(body: unknown): string[];
rebuildResponseToolCalls?(body: unknown, rehydratedArgs: string[]): unknown;
// Tool/function call handling (streaming)
extractSSEToolCallDeltas?(data: unknown): ToolCallDelta[] | null;
rebuildSSEToolCallDeltas?(data: unknown, rehydratedArgs: Map<number, string>): unknown;
extractSSEToolCallStop?(data: unknown): number | null;
}
interface ToolCallDelta {
index: number;
arguments: string;
}
interface SSEEvent {
event: string; // defaults to 'message'
data: string;
}
Built-in Providers
| Provider | Matches |
|---|---|
| OpenAIProvider | api.openai.com URL or Bearer sk- auth header |
| AnthropicProvider | api.anthropic.com URL or x-api-key/anthropic-version headers |
SSEParser
Parser for Server-Sent Events streams.
const parser = new SSEParser();
// Parse SSE text
const events = parser.parse(chunk);
// [{ event: 'message', data: '...' }, ...]
// Check for stream end
import { isSSEDone } from 'rehydra/proxy';
if (isSSEDone(event.data)) { /* stream complete */ }
// Serialize back to SSE format
import { serializeSSEEvent } from 'rehydra/proxy';
const text = serializeSSEEvent({ event: 'message', data: '...' });