3 Commits

Author SHA1 Message Date
f83a1b3957 Tag v0.0.5 2025-06-30 18:31:19 -05:00
af3a52bac6 feat(api): add model field and root endpoint
Add a model field to the gemini request mapping and implement a new
root endpoint that returns a plain text status message.
2025-06-30 18:30:51 -05:00
e7eb40ba4e Added donation section to README 2025-06-30 16:53:44 -05:00
5 changed files with 20 additions and 4 deletions

View File

@@ -10,6 +10,15 @@ This project provides a lightweight proxy server that translates OpenAI API requ
 * **Docker Support:** Includes `Dockerfile` and `docker-compose.yml` for easy containerized deployment.
 * **Hugging Face Spaces Ready:** Can be easily deployed as a Hugging Face Space.
+
+## Support the Project
+
+If you find this project useful, consider supporting its development:
+
+[![Donate using Liberapay][liberapay-logo]][liberapay-link]
+
+[liberapay-logo]: https://liberapay.com/assets/widgets/donate.svg "Liberapay Logo"
+[liberapay-link]: https://liberapay.com/sfiorini/donate
+
 ## Prerequisites
 Before you begin, ensure you have the following installed:

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
 {
   "name": "gemini-cli-openai-api",
-  "version": "0.0.4",
+  "version": "0.0.5",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "gemini-cli-openai-api",
-      "version": "0.0.4",
+      "version": "0.0.5",
       "license": "MIT",
       "dependencies": {
         "@google/gemini-cli-core": "^0.1.7",

View File

@@ -1,6 +1,6 @@
 {
   "name": "gemini-cli-openai-api",
-  "version": "0.0.4",
+  "version": "0.0.5",
   "main": "server.ts",
   "scripts": {
     "build": "tsdown",

View File

@@ -118,6 +118,7 @@ export async function mapRequest(body: RequestBody) {
   return {
     geminiReq: {
+      model: body.model,
       contents,
       generationConfig,
       stream: body.stream,

View File

@@ -24,7 +24,7 @@ if (VERBOSE) {
 consola.info('Verbose logging enabled');
 }
 
-consola.info('Google CLI OpenAI proxy');
+consola.info('Google CLI OpenAI API');
 
 // ==================================================================
 // HTTP Server Helpers
@@ -101,6 +101,12 @@ ensureOAuthCredentials()
     return;
   }
 
+  if (pathname === '/') {
+    res.writeHead(200, { 'Content-Type': 'text/plain' });
+    res.end('Google CLI OpenAI API server is running......');
+    return;
+  }
+
   if (!isAuthorized(req, res)) {
     return;
   }
} }