Implemented the getModels endpoint using the models provided by the API. Fixed some TypeScript errors and created a types file. Added consola for logging.

2025-06-28 13:50:05 -05:00
parent 7c6dc2d818
commit 75dc51bcb1
8 changed files with 126 additions and 34 deletions
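
The diff below imports RequestBody and GeminiResponse from the new types file, which is among the changed files but not shown here. A minimal sketch of what those shapes might look like, inferred from how the proxy uses them; the field names are assumptions, not the actual file:

// Hypothetical sketch of types.ts. RequestBody follows the OpenAI
// chat-completions request shape the proxy parses; GeminiResponse is a
// guess at the subset of Gemini's response that the mapper consumes.
export interface RequestBody {
  model: string;                                   // e.g. 'gemini-2.5-pro'
  messages: { role: string; content: string }[];   // OpenAI-style chat turns
  stream?: boolean;                                // SSE branch in the handler
  tools?: unknown[];                               // forwarded to sendChat
}

export interface GeminiResponse {
  candidates: {
    content: { role: string; parts: { text: string }[] };
    finishReason?: string;
  }[];
}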

@@ -1,10 +1,21 @@
+import consola from 'consola';
 import http from 'http';
-import { sendChat, sendChatStream } from './chatwrapper';
-import { mapRequest, mapResponse, mapStreamChunk } from './mapper';
+import { listModels, sendChat, sendChatStream } from './chatwrapper';
+import { mapRequest, mapResponse, mapStreamChunk } from './mapper.js';
+import { RequestBody, GeminiResponse } from './types';
+import { config } from './config';
 
 /* ── basic config ─────────────────────────────────────────────────── */
+const PORT = config.PORT;
+const VERBOSE = config.VERBOSE;
+
+/* ── Consola setup ────────────────────────────────────────────────── */
+if (VERBOSE) {
+  consola.level = 5;
+  consola.info('Verbose logging enabled');
+}
+
+consola.info('Google CLI OpenAI proxy');
 
 /* ── CORS helper ──────────────────────────────────────────────────── */
 function allowCors(res: http.ServerResponse) {
@@ -17,7 +28,7 @@ function allowCors(res: http.ServerResponse) {
 function readJSON(
   req: http.IncomingMessage,
   res: http.ServerResponse,
-): Promise<any | null> {
+): Promise<RequestBody | null> {
   return new Promise((resolve) => {
     let data = '';
     req.on('data', (c) => (data += c));
@@ -34,9 +45,10 @@ function readJSON(
         return resolve(null);
       }
       try {
-        resolve(JSON.parse(data));
+        resolve(JSON.parse(data) as RequestBody);
       } catch {
-        res.writeHead(400, { 'Content-Type': 'application/json' }); // malformed JSON
+        // malformed JSON
+        res.writeHead(400, { 'Content-Type': 'application/json' });
         res.end(JSON.stringify({ error: { message: 'Malformed JSON' } }));
         resolve(null);
       }
@@ -50,7 +62,7 @@ http
     allowCors(res);
     const url = new URL(req.url ?? '/', `http://${req.headers.host}`);
     const pathname = url.pathname.replace(/\/$/, '') || '/';
-    console.log(`[proxy] ${req.method} ${url.pathname}`);
+    consola.info(`${req.method} ${url.pathname}`);
 
     /* -------- pre-flight ---------- */
     if (req.method === 'OPTIONS') {
@@ -63,13 +75,7 @@ http
       res.writeHead(200, { 'Content-Type': 'application/json' });
       res.end(
         JSON.stringify({
-          data: [
-            {
-              id: 'gemini-2.5-pro',
-              object: 'model',
-              owned_by: 'google',
-            },
-          ],
+          data: listModels(),
         }),
       );
       return;
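
listModels() replaces the hardcoded single-model response above. It lives in chatwrapper.ts, which this commit also changes but whose diff is not shown; a plausible sketch, assuming it returns the same OpenAI-style model objects as the old literal (the id list here is illustrative only):

// Hypothetical sketch of listModels() in chatwrapper.ts -- assumed to map
// the model ids reported by the API onto the OpenAI /v1/models shape that
// the old hardcoded entry used.
const MODEL_IDS = ['gemini-2.5-pro', 'gemini-2.5-flash']; // assumed list

export function listModels() {
  return MODEL_IDS.map((id) => ({
    id,
    object: 'model' as const,
    owned_by: 'google',
  }));
}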
@@ -78,7 +84,8 @@ http
     /* ---- /v1/chat/completions ---- */
     if (
       (pathname === '/chat/completions' ||
-        (pathname === '/v1/chat/completions' ) && req.method === 'POST')
+        pathname === '/v1/chat/completions') &&
+      req.method === 'POST'
     ) {
       const body = await readJSON(req, res);
       if (!body) return;
@@ -98,14 +105,15 @@ http
         }
         res.end('data: [DONE]\n\n');
       } else {
-        const gResp = await sendChat({ ...geminiReq, tools });
+        const gResp: GeminiResponse = await sendChat({ ...geminiReq, tools });
         res.writeHead(200, { 'Content-Type': 'application/json' });
         res.end(JSON.stringify(mapResponse(gResp)));
       }
-    } catch (err: any) {
-      console.error('Proxy error ➜', err);
+    } catch (err) {
+      const error = err as Error;
+      consola.error('Proxy error ➜', error);
       res.writeHead(500, { 'Content-Type': 'application/json' });
-      res.end(JSON.stringify({ error: { message: err.message } }));
+      res.end(JSON.stringify({ error: { message: error.message } }));
     }
     return;
   }
@@ -113,4 +121,6 @@ http
     /* ---- anything else ---------- */
     res.writeHead(404).end();
   })
-  .listen(PORT, () => console.log(`OpenAI proxy on :${PORT}`));
+  .listen(PORT, () => {
+    consola.info(`Listening on port :${PORT}`);
+  });
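
Once the server is listening, any OpenAI-compatible client can exercise both routes. A quick smoke test against the proxy, assuming it runs locally; the port is whatever config.PORT resolves to, and 3000 here is a placeholder:

// Smoke test for the proxy. BASE's port is a placeholder -- substitute
// config.PORT. Uses only the two routes wired up in the handler above.
const BASE = 'http://localhost:3000';

// GET /v1/models -> { data: [...] } built from listModels()
const models = await fetch(`${BASE}/v1/models`).then((r) => r.json());
console.log(models.data.map((m: { id: string }) => m.id));

// POST /v1/chat/completions -> OpenAI-shaped completion via mapResponse()
const chat = await fetch(`${BASE}/v1/chat/completions`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'gemini-2.5-pro',
    messages: [{ role: 'user', content: 'Hello!' }],
  }),
}).then((r) => r.json());
console.log(chat.choices?.[0]?.message);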