From f286ab3d3818fc9fdd30e9c00da0f521266e8152 Mon Sep 17 00:00:00 2001 From: Stefano Date: Mon, 30 Jun 2025 16:45:49 -0500 Subject: [PATCH] feat(auth): add auto-generation of oauth credentials Implement functionality to create the oauth_creds.json file from environment variables (ACCESS_TOKEN, REFRESH_TOKEN, EXPIRY_DATE) if the file is missing. Also update documentation, docker-compose, and build scripts to support this new feature. --- .dockerignore | 31 +++++++ .env.example | 9 +- Dockerfile | 26 ++++++ README.md | 206 +++++++++++++++++++++++++++++++-------------- docker-compose.yml | 20 +++++ package.json | 8 +- src/auth.ts | 39 +++++++++ src/config.ts | 17 ++++ src/server.ts | 181 +++++++++++++++++++++------------------ 9 files changed, 386 insertions(+), 151 deletions(-) create mode 100644 .dockerignore create mode 100644 Dockerfile create mode 100644 docker-compose.yml diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..0a3241d --- /dev/null +++ b/.dockerignore @@ -0,0 +1,31 @@ +# Dependencies +node_modules/ +npm-debug.log +yarn-debug.log* +yarn-error.log* + +# Environment variables +.env +.env.example + +# Build output +dist/ +build/ +coverage/ + +# Development +profile/ +*.test.ts +*.spec.ts + +# Version control +.git/ +.gitignore + +# IDE +.vscode/ +.idea/ + +# Docker +Dockerfile +docker-compose.yml \ No newline at end of file diff --git a/.env.example b/.env.example index 5361eab..4cf7dec 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,10 @@ PORT=11434 VERBOSE=false -API_KEY=MY0P3NA1K3Y \ No newline at end of file +API_KEY=MY0P3NA1K3Y +ACCESS_TOKEN=MYACC3SS_T0K3N +REFRESH_TOKEN=MYR3FR3SH_T0K3N +EXPIRY_DATE=1234567890 +# Docker +DOCKER_REGISTRY= +DOCKER_REGISTRY_USER= +DOCKER_HUB_USER= \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c066622 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +# Use an official Node.js runtime as a parent image +FROM node:22.15-slim + +# Set 
the working directory in the container +WORKDIR /usr/src/app + +# Create directory for oauth credentials +RUN mkdir -p /root/.gemini + +# Copy package.json and package-lock.json to the working directory +COPY package*.json ./ + +# Install any needed packages specified in package.json +RUN npm install + +# Bundle app source +COPY . . + +# Build the typescript code +RUN npm run build + +# Make port 4343 available to the world outside this container +EXPOSE 4343 + +# Define the command to run the app +CMD [ "npm", "start" ] \ No newline at end of file diff --git a/README.md b/README.md index d05d55c..cb4d056 100644 --- a/README.md +++ b/README.md @@ -1,76 +1,154 @@ -# Gemini ↔︎ OpenAI Proxy +# Gemini CLI OpenAI API Proxy -Serve **Google Gemini 2.5 Pro** (or Flash) through an **OpenAI-compatible API**. -Plug-and-play with clients that already speak OpenAI—SillyTavern, llama.cpp, LangChain, the VS Code *Cline* extension, etc. +This project provides a lightweight proxy server that translates OpenAI API requests to the Google Gemini API, utilizing the `@google/gemini-cli` for authentication and request handling. ---- +## Features -## ✨ Features +* **OpenAI API Compatibility:** Acts as a drop-in replacement for services that use the OpenAI API format. +* **Google Gemini Integration:** Leverages the power of Google's Gemini models. +* **Authentication:** Uses `gemini-cli` for secure OAuth2 authentication with Google. +* **Docker Support:** Includes `Dockerfile` and `docker-compose.yml` for easy containerized deployment. +* **Hugging Face Spaces Ready:** Can be easily deployed as a Hugging Face Space. 
-| ✔ | Feature | Notes | -|---|---------|-------| -| `/v1/chat/completions` | Non-stream & stream (SSE) | Works with curl, ST, LangChain… | -| Vision support | `image_url` → Gemini `inlineData` | | -| Function / Tool calling | OpenAI “functions” → Gemini Tool Registry | | -| Reasoning / chain-of-thought | Sends `enable_thoughts:true`, streams `` chunks | ST shows grey bubbles | -| 1 M-token context | Proxy auto-lifts Gemini CLI’s default 200 k cap | | -| CORS | Enabled (`*`) by default | Ready for browser apps | -| Zero external deps | Node 22 + TypeScript only | No Express | +## Prerequisites ---- +Before you begin, ensure you have the following installed: -## 🚀 Quick start (local) +* [Node.js](https://nodejs.org/) (v18 or higher) +* [npm](https://www.npmjs.com/) +* [Docker](https://www.docker.com/) (for containerized deployment) +* [Git](https://git-scm.com/) + +## Local Installation and Setup + +1. **Clone the repository:** + + ```bash + git clone https://github.com/your-username/gemini-cli-openai-api.git + cd gemini-cli-openai-api + ``` + +2. **Install project dependencies:** + + ```bash + npm install + ``` + +3. **Install the Gemini CLI and Authenticate:** + + This is a crucial step to authenticate with your Google account and generate the necessary credentials. + + ```bash + npm install -g @google/gemini-cli + gemini auth login + ``` + + Follow the on-screen instructions to log in with your Google account. This will create a file at `~/.gemini/oauth_creds.json` containing your authentication tokens. + +4. **Configure Environment Variables:** + + Create a `.env` file by copying the example file: + + ```bash + cp .env.example .env + ``` + + Open the `.env` file and set the following variables: + + * `PORT`: The port the server will run on (default: `11434`). + * `API_KEY`: A secret key to protect your API endpoint. You can generate a strong random string for this. 
+ +## Running the Project + +### Development Mode + +To run the server in development mode with hot-reloading: ```bash -git clone https://huggingface.co/engineofperplexity/gemini-openai-proxy -cd gemini-openai-proxy -npm ci # install deps & ts-node +npm run dev +``` -# launch on port 11434 -npx ts-node src/server.ts -Optional env vars -PORT=3000 change listen port -GEMINI_API_KEY= use your own key +The server will be accessible at `http://localhost:11434` (or the port you specified). -Minimal curl test -bash -Copy -Edit -curl -X POST http://localhost:11434/v1/chat/completions \ - -H "Content-Type: application/json" \ - -d '{ - "model": "gemini-2.5-pro-latest", - "messages":[{"role":"user","content":"Hello Gemini!"}] - }' -SillyTavern settings -Field Value -API Base URL http://127.0.0.1:11434/v1 -Model gemini-2.5-pro-latest -Streaming On -Reasoning On → grey lines appear +### Production Mode -🐳 Docker -bash -Copy -Edit -# build once -docker build -t gemini-openai-proxy . +To build and run the server in production mode: -# run -docker run -p 11434:11434 \ - -e GEMINI_API_KEY=$GEMINI_API_KEY \ - gemini-openai-proxy -🗂 Project layout -pgsql -Copy -Edit -src/ - server.ts – minimalist HTTP server - mapper.ts – OpenAI ⇄ Gemini transforms - chatwrapper.ts – thin wrapper around @google/genai - remoteimage.ts – fetch + base64 for vision -package.json – deps & scripts -Dockerfile -README.md -📜 License -MIT – free for personal & commercial use. \ No newline at end of file +```bash +npm run build +npm start +``` + +## Docker Deployment + +### Using Docker Compose + +The easiest way to deploy the project with Docker is by using the provided `docker-compose.yml` file. + +1. **Authentication:** + + The Docker container needs access to your OAuth credentials. You have two options: + + * **Option A (Recommended): Mount the credentials file.** + Uncomment the `volumes` section in `docker-compose.yml` to mount your local `oauth_creds.json` file into the container. 
+ + ```yaml + volumes: + - ~/.gemini/oauth_creds.json:/root/.gemini/oauth_creds.json + ``` + + * **Option B: Use environment variables.** + If you cannot mount the file, you can set the `ACCESS_TOKEN`, `REFRESH_TOKEN`, and `EXPIRY_DATE` environment variables in the `docker-compose.yml` file. You can get these values from your `~/.gemini/oauth_creds.json` file. + +2. **Configure `docker-compose.yml`:** + + Open `docker-compose.yml` and set the `API_KEY` and other environment variables as needed. + +3. **Start the container:** + + ```bash + docker-compose up -d + ``` + + The server will be running on the port specified in the `ports` section of the `docker-compose.yml` file (e.g., `4343`). + +### Building the Docker Image Manually + +If you need to build the Docker image yourself: + +```bash +docker build -t gemini-cli-openai-api . +``` + +Then you can run the container with the appropriate environment variables and volume mounts. + +## Hugging Face Spaces Deployment + +You can deploy this project as a Docker Space on Hugging Face. + +1. **Create a new Space:** + * Go to [huggingface.co/new-space](https://huggingface.co/new-space). + * Choose a name for your space. + * Select "Docker" as the Space SDK. + * Choose "From scratch". + * Create the space. + +2. **Upload the project files:** + * Upload all the project files (including the `Dockerfile`) to your new Hugging Face Space repository. You can do this via the web interface or by cloning the space's repository and pushing the files. + +3. **Configure Secrets:** + * In your Space's settings, go to the "Secrets" section. + * Add the following secrets. You can get the values for the first three from your `~/.gemini/oauth_creds.json` file. + * `ACCESS_TOKEN`: Your Google OAuth access token. + * `REFRESH_TOKEN`: Your Google OAuth refresh token. + * `EXPIRY_DATE`: The expiry date of your access token. + * `API_KEY`: The secret API key you want to use to protect your endpoint. 
+ * `PORT`: The port the application should run on inside the container (e.g., `7860`, which is a common default for Hugging Face Spaces). + +4. **Update Dockerfile (if necessary):** + * The provided `Dockerfile` exposes port `4343`. If Hugging Face requires a different port (like `7860`), you may need to update the `EXPOSE` instruction in the `Dockerfile`. + +5. **Deploy:** + * Hugging Face Spaces will automatically build and deploy your Docker container when you push changes to the repository. Check the "Logs" to monitor the build and deployment process. + +Your Gemini-powered OpenAI proxy will now be running on your Hugging Face Space! diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..d27dcd8 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,20 @@ +version: '3.8' +services: + gemini-cli-openai-api: + container_name: gemini-cli-openai-api + image: sfiorini/gemini-cli-openai-api:latest + ports: + - "4343:4343" + # Enable sharing a pre existing OAuth credentials file + # to avoid the need to set environment variables. 
+ # volumes: + # - ~/.gemini/oauth_creds.json:/root/.gemini/oauth_creds.json + environment: + - TZ=America/Chicago + - PORT=4343 + - VERBOSE=false + - API_KEY=MY0P3NA1K3Y + - ACCESS_TOKEN=MYACC3SS_T0K3N + - REFRESH_TOKEN=MYR3FR3SH_T0K3N + - EXPIRY_DATE=1234567890 + restart: unless-stopped \ No newline at end of file diff --git a/package.json b/package.json index f4ee390..5c6e1f6 100644 --- a/package.json +++ b/package.json @@ -7,12 +7,16 @@ "bump-release": "bumpp", "dev": "tsx watch ./src/server.ts", "docker": "npm run docker:build && npm run docker:push", - "docker:build": "npm run docker:build:version && npm run docker:tag:latest", + "docker:build": "npm run docker:build:version && npm run docker:tag:latest && npm run docker:build:du:version && npm run docker:tag:du:latest", "docker:build:version": "dotenv -- bash -c 'docker build -t $DOCKER_REGISTRY/$DOCKER_REGISTRY_USER/$npm_package_name:v$npm_package_version .'", - "docker:push": "npm run docker:push:version && npm run docker:push:latest", + "docker:build:du:version": "dotenv -- bash -c 'docker build -t $DOCKER_HUB_USER/$npm_package_name:v$npm_package_version .'", + "docker:push": "npm run docker:push:version && npm run docker:push:latest && npm run docker:push:du:version && npm run docker:push:du:latest", "docker:push:latest": "dotenv -- bash -c 'docker push $DOCKER_REGISTRY/$DOCKER_REGISTRY_USER/$npm_package_name:latest'", + "docker:push:du:latest": "dotenv -- bash -c 'docker push $DOCKER_HUB_USER/$npm_package_name:latest'", "docker:push:version": "dotenv -- bash -c 'docker push $DOCKER_REGISTRY/$DOCKER_REGISTRY_USER/$npm_package_name:v$npm_package_version'", + "docker:push:du:version": "dotenv -- bash -c 'docker push $DOCKER_HUB_USER/$npm_package_name:v$npm_package_version'", "docker:tag:latest": "dotenv -- bash -c 'docker tag $DOCKER_REGISTRY/$DOCKER_REGISTRY_USER/$npm_package_name:v$npm_package_version $DOCKER_REGISTRY/$DOCKER_REGISTRY_USER/$npm_package_name:latest'", + "docker:tag:du:latest": "dotenv -- 
bash -c 'docker tag $DOCKER_HUB_USER/$npm_package_name:v$npm_package_version $DOCKER_HUB_USER/$npm_package_name:latest'", "start": "node ./dist/server.js", "knip": "knip", "lint": "eslint --fix ." diff --git a/src/auth.ts b/src/auth.ts index 0a111b9..1df6974 100644 --- a/src/auth.ts +++ b/src/auth.ts @@ -3,6 +3,45 @@ */ import http from 'http'; import { config } from './config'; +import fs from 'fs/promises'; +import path from 'path'; +import os from 'os'; +import consola from 'consola'; + +/** + * Ensures that the OAuth credentials file exists if the required environment + * variables are present. + */ +export async function ensureOAuthCredentials(): Promise<void> { + const geminiDir = path.join(os.homedir(), '.gemini'); + const credsPath = path.join(geminiDir, 'oauth_creds.json'); + + try { + await fs.access(credsPath); + consola.info(`OAuth credentials file already exists at ${credsPath}`); + } catch { + consola.info(`OAuth credentials file not found at ${credsPath}.`); + if (config.ACCESS_TOKEN && config.REFRESH_TOKEN && config.EXPIRY_DATE) { + consola.info('Creating OAuth credentials file' + + ' from environment variables.'); + await fs.mkdir(geminiDir, { recursive: true }); + const creds = { + access_token: config.ACCESS_TOKEN, + refresh_token: config.REFRESH_TOKEN, + token_type: 'Bearer', + expiry_date: config.EXPIRY_DATE, + }; + await fs.writeFile(credsPath, JSON.stringify(creds, null, 2)); + consola.info(`Successfully created ${credsPath}`); + } else { + consola.error( + 'OAuth credentials file is missing and one or more required ' + + 'environment variables: ACCESS_TOKEN, REFRESH_TOKEN, EXPIRY_DATE.', + ); + throw new Error('Missing OAuth credentials or environment variables.'); + } + } +} /** * Checks for API key authentication. 
diff --git a/src/config.ts b/src/config.ts index 87e7bdd..f68e9c2 100644 --- a/src/config.ts +++ b/src/config.ts @@ -29,4 +29,21 @@ export const config = { * @type {string | undefined} */ API_KEY: process.env.API_KEY, + /** + * The access token for OAuth. + * @type {string | undefined} + */ + ACCESS_TOKEN: process.env.ACCESS_TOKEN, + /** + * The refresh token for OAuth. + * @type {string | undefined} + */ + REFRESH_TOKEN: process.env.REFRESH_TOKEN, + /** + * The expiry date for the access token. + * @type {number | undefined} + */ + EXPIRY_DATE: process.env.EXPIRY_DATE + ? Number(process.env.EXPIRY_DATE) + : undefined, }; \ No newline at end of file diff --git a/src/server.ts b/src/server.ts index 0c290d4..adad12f 100644 --- a/src/server.ts +++ b/src/server.ts @@ -8,7 +8,7 @@ import { listModels, sendChat, sendChatStream } from './chatwrapper'; import { mapRequest, mapResponse, mapStreamChunk } from './mapper.js'; import { RequestBody, GeminiResponse, GeminiStreamChunk, Part } from './types'; import { config } from './config'; -import { isAuthorized } from './auth'; +import { isAuthorized, ensureOAuthCredentials } from './auth'; // ================================================================== // Server Configuration @@ -85,56 +85,60 @@ function readJSON( // ================================================================== // Main Server Logic // ================================================================== -http - .createServer(async (req, res) => { - allowCors(res); - const url = new URL(req.url ?? '/', `http://${req.headers.host}`); - const pathname = url.pathname.replace(/\/$/, '') || '/'; - consola.info(`${req.method} ${url.pathname}`); - // Handle pre-flight CORS requests. - if (req.method === 'OPTIONS') { - res.writeHead(204).end(); - return; - } +ensureOAuthCredentials() + .then(() => { + http + .createServer(async (req, res) => { + allowCors(res); + const url = new URL(req.url ?? 
'/', `http://${req.headers.host}`); + const pathname = url.pathname.replace(/\/$/, '') || '/'; + consola.info(`${req.method} ${url.pathname}`); - if (!isAuthorized(req, res)) { - return; - } + // Handle pre-flight CORS requests. + if (req.method === 'OPTIONS') { + res.writeHead(204).end(); + return; + } - // Route for listing available models. - if (pathname === '/v1/models' || pathname === '/models') { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end( - JSON.stringify({ - data: listModels(), - }), - ); - return; - } + if (!isAuthorized(req, res)) { + return; + } - // Route for chat completions. - if ( - (pathname === '/chat/completions' || + // Route for listing available models. + if (pathname === '/v1/models' || pathname === '/models') { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end( + JSON.stringify({ + data: listModels(), + }), + ); + return; + } + + // Route for chat completions. + if ( + (pathname === '/chat/completions' || pathname === '/v1/chat/completions') && req.method === 'POST' - ) { - const body = await readJSON(req, res); - if (!body) return; + ) { + const body = await readJSON(req, res); + if (!body) return; - try { - const { geminiReq, tools } = await mapRequest(body); + try { + const { geminiReq, tools } = await mapRequest(body); - if (body.stream) { - res.writeHead(200, { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - Connection: 'keep-alive', - }); + if (body.stream) { + res.writeHead(200, { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + Connection: 'keep-alive', + }); - for await (const chunk of sendChatStream({ ...geminiReq, tools })) { - // Transform the chunk to match the expected stream format. - const transformedParts = + for await ( + const chunk of sendChatStream({ ...geminiReq, tools })) { + // Transform the chunk to match the expected stream format. 
+ const transformedParts = chunk.candidates?.[0]?.content?.parts?.map((part) => { const transformedPart: Part = { text: part.text, @@ -151,50 +155,59 @@ http return transformedPart; }) ?? []; - const streamChunk: GeminiStreamChunk = { - candidates: [ - { - content: { - parts: transformedParts, - }, - }, - ], - }; + const streamChunk: GeminiStreamChunk = { + candidates: [ + { + content: { + parts: transformedParts, + }, + }, + ], + }; - res.write( - `data: ${JSON.stringify(mapStreamChunk(streamChunk))}\n\n`, - ); - } - res.end('data: [DONE]\n\n'); - } else { - const gResp: GeminiResponse = await sendChat({ ...geminiReq, tools }); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(mapResponse(gResp, body))); - } - } catch (err) { - const error = err as Error; - consola.error('Proxy error ➜', error); + res.write( + `data: ${JSON.stringify(mapStreamChunk(streamChunk))}\n\n`, + ); + } + res.end('data: [DONE]\n\n'); + } else { + const gResp: GeminiResponse = + await sendChat({ ...geminiReq, tools }); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(mapResponse(gResp, body))); + } + } catch (err) { + const error = err as Error; + consola.error('Proxy error ➜', error); - // Handle errors, sending them in the appropriate format for streaming - // or non-streaming responses. - if (body.stream && res.headersSent) { - res.write( - `data: ${JSON.stringify({ - error: { - message: error.message, - type: 'error', - }, - })}\n\n`, - ); - res.end('data: [DONE]\n\n'); - return; - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: { message: error.message } })); + // Handle errors, sending them in the appropriate + // format for streaming or non-streaming responses. 
+ if (body.stream && res.headersSent) { + res.write( + `data: ${JSON.stringify({ + error: { + message: error.message, + type: 'error', + }, + })}\n\n`, + ); + res.end('data: [DONE]\n\n'); + return; + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: { message: error.message } })); + } + } } - } - } + }) + .listen(PORT, () => { + consola.info(`Listening on port :${PORT}`); + }); }) - .listen(PORT, () => { - consola.info(`Listening on port :${PORT}`); + .catch((err: unknown) => { + if (err instanceof Error) { + consola.error(err.message); + } else { + consola.error('An unknown error occurred during startup.'); + } });