Implemented getModels endpoint with the models provided by the API. Fixed some TypeScript errors, created types file. Added consola for logging.

This commit is contained in:
2025-06-28 13:50:05 -05:00
parent 7c6dc2d818
commit 75dc51bcb1
8 changed files with 126 additions and 34 deletions

5
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,5 @@
{
"cSpell.ignorePaths" : [
"src"
],
}

2
package-lock.json generated
View File

@@ -10,6 +10,7 @@
"license": "MIT",
"dependencies": {
"@google/gemini-cli-core": "^0.1.7",
"consola": "^3.4.2",
"dotenv": "^17.0.0",
"zod": "^3.25.67"
},
@@ -2941,7 +2942,6 @@
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz",
"integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^14.18.0 || >=16.10.0"

View File

@@ -16,6 +16,7 @@
"description": "",
"dependencies": {
"@google/gemini-cli-core": "^0.1.7",
"consola": "^3.4.2",
"dotenv": "^17.0.0",
"zod": "^3.25.67"
},

View File

@@ -3,13 +3,21 @@ import {
AuthType,
createContentGeneratorConfig,
createContentGenerator,
ContentGenerator,
} from '@google/gemini-cli-core/dist/src/core/contentGenerator.js';
import {
DEFAULT_GEMINI_MODEL,
DEFAULT_GEMINI_FLASH_MODEL,
} from '@google/gemini-cli-core/dist/src/config/models.js';
import { Content, GeminiResponse, Model } from './types.js';
/* ------------------------------------------------------------------ */
/* 1. Build the ContentGenerator exactly like the CLI does */
/* ------------------------------------------------------------------ */
let modelName: string; // we'll fill this once
const generatorPromise = (async () => {
const generatorPromise: Promise<ContentGenerator> = (async () => {
// Pass undefined for model so the helper falls back to DEFAULT_GEMINI_MODEL
const cfg = await createContentGeneratorConfig(
undefined, // let helper pick default (Gemini-2.5-Pro)
@@ -28,27 +36,35 @@ export async function sendChat({
contents,
generationConfig = {},
}: {
contents: any[],
contents: Content[],
generationConfig?: GenConfig,
tools?: unknown, // accepted but ignored for now
}) {
const generator: any = await generatorPromise;
return await generator.generateContent({
}): Promise<GeminiResponse> {
const generator = await generatorPromise;
const gResp = await generator.generateContent({
model: modelName,
contents,
config: generationConfig,
});
return {
text: gResp.text ?? '',
usageMetadata: {
promptTokens: gResp.usageMetadata?.promptTokenCount ?? 0,
candidatesTokens: gResp.usageMetadata?.candidatesTokenCount ?? 0,
totalTokens: gResp.usageMetadata?.totalTokenCount ?? 0,
},
};
}
export async function* sendChatStream({
contents,
generationConfig = {},
}: {
contents: any[],
contents: Content[],
generationConfig?: GenConfig,
tools?: unknown,
}) {
const generator: any = await generatorPromise;
const generator = await generatorPromise;
const stream = await generator.generateContentStream({
model: modelName,
contents,
@@ -57,13 +73,24 @@ export async function* sendChatStream({
for await (const chunk of stream) yield chunk;
}
/**
 * Enumerate the Gemini models this proxy advertises on the OpenAI-style
 * /models endpoint. Every entry is an OpenAI "model object" owned by Google.
 */
export function listModels(): Model[] {
  return [DEFAULT_GEMINI_MODEL, DEFAULT_GEMINI_FLASH_MODEL].map(
    (id): Model => ({ id, object: 'model', owned_by: 'google' }),
  );
}
/* ------------------------------------------------------------------ */
/* 3. Additional stubs to implement later */
/* ------------------------------------------------------------------ */
// export function listModels() {
// return [{ id: modelName }];
// }
// export async function embed(_input: unknown) {
// throw new Error('Embeddings endpoint not implemented yet.');
// }

View File

@@ -1,8 +1,9 @@
/* eslint-disable n/no-process-env */
import dotenv from 'dotenv';
dotenv.config();
/**
 * Runtime configuration, resolved once at module load from the environment
 * (dotenv has already populated process.env above).
 */
export const config = {
  // TCP port the proxy listens on; defaults to 11434 when PORT is unset.
  // eslint-disable-next-line n/no-process-env
  PORT: Number(process.env.PORT ?? 11434),
  // Env vars are strings, so the previous `Boolean(process.env.VERBOSE ?? true)`
  // was true for every non-empty value — `VERBOSE=false` could never switch
  // verbose logging off. Recognize the common "off" spellings (and the empty
  // string, which Boolean() also treated as false); anything else, including
  // an unset variable, keeps verbose logging on.
  // eslint-disable-next-line n/no-process-env
  VERBOSE: !['false', '0', 'no', 'off', ''].includes(
    String(process.env.VERBOSE ?? 'true').trim().toLowerCase(),
  ),
};

View File

@@ -4,6 +4,7 @@
import { fetchAndEncode } from './remoteimage';
import { z } from 'zod';
import { ToolRegistry } from '@google/gemini-cli-core/dist/src/tools/tool-registry.js';
import { RequestBody } from './types';
/* ------------------------------------------------------------------ */
interface Part { text?: string; inlineData?: { mimeType: string, data: string } }
@@ -16,14 +17,14 @@ function callLocalFunction(_name: string, _args: unknown) {
/* ================================================================== */
/* Request mapper: OpenAI ➞ Gemini */
/* ================================================================== */
export async function mapRequest(body: any) {
export async function mapRequest(body: RequestBody) {
const parts: Part[] = [];
/* ---- convert messages & vision --------------------------------- */
for (const m of body.messages) {
if (Array.isArray(m.content)) {
for (const item of m.content) {
if (item.type === 'image_url') {
if (item.type === 'image_url' && item.image_url) {
parts.push({ inlineData: await fetchAndEncode(item.image_url.url) });
} else if (item.type === 'text') {
parts.push({ text: item.text });

View File

@@ -1,10 +1,21 @@
import consola from 'consola';
import http from 'http';
import { sendChat, sendChatStream } from './chatwrapper';
import { mapRequest, mapResponse, mapStreamChunk } from './mapper';
import { listModels, sendChat, sendChatStream } from './chatwrapper';
import { mapRequest, mapResponse, mapStreamChunk } from './mapper.js';
import { RequestBody, GeminiResponse } from './types';
import { config } from './config';
/* ── basic config ─────────────────────────────────────────────────── */
const PORT = config.PORT;
const VERBOSE = config.VERBOSE;
/* ── Consola setup ────────────────────────────────────────────────── */
if (VERBOSE) {
consola.level = 5;
consola.info('Verbose logging enabled');
}
consola.info('Google CLI OpenAI proxy');
/* ── CORS helper ──────────────────────────────────────────────────── */
function allowCors(res: http.ServerResponse) {
@@ -17,7 +28,7 @@ function allowCors(res: http.ServerResponse) {
function readJSON(
req: http.IncomingMessage,
res: http.ServerResponse,
): Promise<any | null> {
): Promise<RequestBody | null> {
return new Promise((resolve) => {
let data = '';
req.on('data', (c) => (data += c));
@@ -34,9 +45,10 @@ function readJSON(
return resolve(null);
}
try {
resolve(JSON.parse(data));
resolve(JSON.parse(data) as RequestBody);
} catch {
res.writeHead(400, { 'Content-Type': 'application/json' }); // malformed JSON
// malformed JSON
res.writeHead(400, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: { message: 'Malformed JSON' } }));
resolve(null);
}
@@ -50,7 +62,7 @@ http
allowCors(res);
const url = new URL(req.url ?? '/', `http://${req.headers.host}`);
const pathname = url.pathname.replace(/\/$/, '') || '/';
console.log(`[proxy] ${req.method} ${url.pathname}`);
consola.info(`${req.method} ${url.pathname}`);
/* -------- pre-flight ---------- */
if (req.method === 'OPTIONS') {
@@ -63,13 +75,7 @@ http
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(
JSON.stringify({
data: [
{
id: 'gemini-2.5-pro',
object: 'model',
owned_by: 'google',
},
],
data: listModels(),
}),
);
return;
@@ -78,7 +84,8 @@ http
/* ---- /v1/chat/completions ---- */
if (
(pathname === '/chat/completions' ||
(pathname === '/v1/chat/completions' ) && req.method === 'POST')
pathname === '/v1/chat/completions') &&
req.method === 'POST'
) {
const body = await readJSON(req, res);
if (!body) return;
@@ -98,14 +105,15 @@ http
}
res.end('data: [DONE]\n\n');
} else {
const gResp = await sendChat({ ...geminiReq, tools });
const gResp: GeminiResponse = await sendChat({ ...geminiReq, tools });
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify(mapResponse(gResp)));
}
} catch (err: any) {
console.error('Proxy error ➜', err);
} catch (err) {
const error = err as Error;
consola.error('Proxy error ➜', error);
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: { message: err.message } }));
res.end(JSON.stringify({ error: { message: error.message } }));
}
return;
}
@@ -113,4 +121,6 @@ http
/* ---- anything else ---------- */
res.writeHead(404).end();
})
.listen(PORT, () => console.log(`OpenAI proxy on :${PORT}`));
.listen(PORT, () => {
consola.info(`Listening on port :${PORT}`);
});

47
src/types.ts Normal file
View File

@@ -0,0 +1,47 @@
/* ------------------------------------------------------------------ */
/* types.ts - Type definitions for the application */
/* ------------------------------------------------------------------ */
/** OpenAI-compatible model descriptor, as served by the /models endpoint. */
export interface Model {
// Model identifier (e.g. a Gemini model name).
id: string;
// Always the literal 'model', per the OpenAI model-object shape.
object: 'model';
// Always 'google' — this proxy only exposes Google-owned models.
owned_by: 'google';
}
/** A single Gemini content part: plain text or inline base64-encoded data. */
export interface Part {
// Text fragment, when this part carries text.
text?: string;
// Inline binary payload (e.g. an image) with its MIME type; `data` is base64.
inlineData?: { mimeType: string, data: string };
}
/** One Gemini conversation turn: a role plus its ordered content parts. */
export interface Content {
// Speaker of this turn (e.g. 'user' or 'model').
role: string;
// Ordered parts making up the turn's content.
parts: Part[];
}
/**
 * Incoming OpenAI-style chat-completion request body, as parsed from JSON
 * before being mapped to a Gemini request.
 */
export interface RequestBody {
// Chat messages; content is either a plain string or a multimodal array
// mixing text items and image_url items (vision input).
// NOTE(review): entries carry no `role` field here — confirm the mapper
// really never needs the message role.
messages: {
content:
| string
| { type: string, image_url?: { url: string }, text?: string }[],
}[];
// Sampling temperature (OpenAI naming).
temperature?: number;
// Upper bound on generated tokens (OpenAI naming).
max_tokens?: number;
// Nucleus-sampling probability mass (OpenAI naming).
top_p?: number;
// Pass-through Gemini generation config, forwarded as-is.
generationConfig?: Record<string, unknown>;
include_reasoning?: boolean;
// When true, the response is streamed as SSE chunks.
stream?: boolean;
// Legacy OpenAI function-calling declarations; `parameters` is a JSON
// Schema fragment.
functions?: {
name: string,
description?: string,
parameters?: {
properties?: Record<string, unknown>,
},
}[];
}
/**
 * Normalized Gemini reply produced by sendChat: the generated text plus
 * token-usage counters (zero-filled when the API omits usage metadata).
 */
export interface GeminiResponse {
// Concatenated generated text ('' when the API returned none).
text: string;
// Token accounting mapped from Gemini's usageMetadata counters.
usageMetadata?: {
promptTokens: number,
candidatesTokens: number,
totalTokens: number,
};
}