Added model and role translation; rewrote the code's comments.

This commit is contained in:
2025-06-30 12:01:00 -05:00
parent 2370a798d1
commit 37f0c4b643
7 changed files with 379 additions and 109 deletions

View File

@@ -1,3 +1,7 @@
/**
* @fileoverview This file sets up and runs the HTTP server that acts as a
* proxy between an OpenAI-compatible client and the Gemini API.
*/
import consola from 'consola';
import http from 'http';
import { listModels, sendChat, sendChatStream } from './chatwrapper';
@@ -5,11 +9,15 @@ import { mapRequest, mapResponse, mapStreamChunk } from './mapper.js';
import { RequestBody, GeminiResponse, GeminiStreamChunk, Part } from './types';
import { config } from './config';
/* ── basic config ─────────────────────────────────────────────────── */
// ==================================================================
// Server Configuration
// ==================================================================
const PORT = config.PORT;
const VERBOSE = config.VERBOSE;
/* ── Consola setup ────────────────────────────────────────────────── */
// ==================================================================
// Logger Setup
// ==================================================================
if (VERBOSE) {
consola.level = 5;
consola.info('Verbose logging enabled');
@@ -17,14 +25,27 @@ if (VERBOSE) {
consola.info('Google CLI OpenAI proxy');
/* ── CORS helper ──────────────────────────────────────────────────── */
// ==================================================================
// HTTP Server Helpers
// ==================================================================
/**
* Sets CORS headers to allow cross-origin requests.
* @param res - The HTTP server response object.
*/
/**
 * Applies permissive CORS headers so any origin may call the proxy.
 * @param res - The HTTP server response to decorate.
 */
function allowCors(res: http.ServerResponse) {
  const corsHeaders: ReadonlyArray<[string, string]> = [
    ['Access-Control-Allow-Origin', '*'],
    ['Access-Control-Allow-Headers', '*'],
    ['Access-Control-Allow-Methods', 'GET,POST,OPTIONS'],
  ];
  for (const [name, value] of corsHeaders) {
    res.setHeader(name, value);
  }
}
/* ── JSON body helper ─────────────────────────────────────────────── */
/**
* Reads and parses a JSON request body.
* @param req - The HTTP incoming message object.
* @param res - The HTTP server response object.
* @returns A promise that resolves to the parsed request body
* or null if invalid.
*/
function readJSON(
req: http.IncomingMessage,
res: http.ServerResponse,
@@ -50,7 +71,7 @@ function readJSON(
try {
resolve(JSON.parse(data) as RequestBody);
} catch {
// malformed JSON
// Handle malformed JSON.
res.writeHead(400, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: { message: 'Malformed JSON' } }));
resolve(null);
@@ -60,7 +81,9 @@ function readJSON(
});
}
/* ── server ───────────────────────────────────────────────────────── */
// ==================================================================
// Main Server Logic
// ==================================================================
http
.createServer(async (req, res) => {
allowCors(res);
@@ -68,13 +91,13 @@ http
const pathname = url.pathname.replace(/\/$/, '') || '/';
consola.info(`${req.method} ${url.pathname}`);
/* -------- pre-flight ---------- */
// Handle pre-flight CORS requests.
if (req.method === 'OPTIONS') {
res.writeHead(204).end();
return;
}
/* -------- /v1/models ---------- */
// Route for listing available models.
if (pathname === '/v1/models' || pathname === '/models') {
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(
@@ -85,7 +108,7 @@ http
return;
}
/* ---- /v1/chat/completions ---- */
// Route for chat completions.
if (
(pathname === '/chat/completions' ||
pathname === '/v1/chat/completions') &&
@@ -105,32 +128,34 @@ http
});
for await (const chunk of sendChatStream({ ...geminiReq, tools })) {
// Transform the chunk to match our expected type
const transformedParts =
chunk.candidates?.[0]?.content?.parts?.map(part => {
const transformedPart: Part = {
text: part.text,
thought: part.text?.startsWith?.('<think>') ?? false,
};
if (part.inlineData?.data) {
transformedPart.inlineData = {
mimeType: part.inlineData.mimeType ?? 'text/plain',
data: part.inlineData.data,
// Transform the chunk to match the expected stream format.
const transformedParts =
chunk.candidates?.[0]?.content?.parts?.map((part) => {
const transformedPart: Part = {
text: part.text,
thought: part.text?.startsWith?.('<think>') ?? false,
};
}
return transformedPart;
}) ?? [];
if (part.inlineData?.data) {
transformedPart.inlineData = {
mimeType: part.inlineData.mimeType ?? 'text/plain',
data: part.inlineData.data,
};
}
return transformedPart;
}) ?? [];
const streamChunk: GeminiStreamChunk = {
candidates: [{
content: {
parts: transformedParts,
candidates: [
{
content: {
parts: transformedParts,
},
},
}],
],
};
res.write(
`data: ${JSON.stringify(mapStreamChunk(streamChunk))}\n\n`,
);
@@ -139,24 +164,26 @@ http
} else {
const gResp: GeminiResponse = await sendChat({ ...geminiReq, tools });
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify(mapResponse(gResp)));
res.end(JSON.stringify(mapResponse(gResp, body)));
}
} catch (err) {
const error = err as Error;
consola.error('Proxy error ➜', error);
// For streaming responses, send error in stream format
// Handle errors, sending them in the appropriate format for streaming
// or non-streaming responses.
if (body.stream && res.headersSent) {
res.write(`data: ${JSON.stringify({
error: {
message: error.message,
type: 'error',
},
})}\n\n`);
res.write(
`data: ${JSON.stringify({
error: {
message: error.message,
type: 'error',
},
})}\n\n`,
);
res.end('data: [DONE]\n\n');
return;
} else {
// For non-streaming responses or if headers haven't been sent yet
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: { message: error.message } }));
}