diff --git a/README.md b/README.md index b50cc7d..1cd1569 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,13 @@ You can press the 'reverse' button to give input as Natural Language and get SQL OPENAI_API_KEY=$YOUR_API_KEY ``` + Optionally, if you would like to point to a custom OpenAI-compatible API endpoint and model name, you can set them in the `.env` file. For example, if you would like to run locally with Ollama using the `llama3.2` model, you could add + + ```bash + OPENAI_API_ENDPOINT_URL=http://localhost:11434 + MODEL_ID=llama3.2 + ``` + 5. Start the development server: ```bash @@ -74,12 +81,19 @@ You can press the 'reverse' button to give input as Natural Language and get SQL git clone https://github.com/whoiskatrin/sql-translator.git ``` -2. Input your OPENAI API key in the .env.production file, you can get your API key [here](https://beta.openai.com/account/api-keys): +2. Input your OPENAI API key in the `.env.production` file, you can get your API key [here](https://beta.openai.com/account/api-keys): ```bash OPENAI_API_KEY=$YOUR_API_KEY ``` + Optionally, if you would like to point to a custom OpenAI-compatible API endpoint and model name, you can set them in the `.env.production` file. For example, if you would like to run locally with Ollama using the `llama3.2` model, you could add + + ```bash + OPENAI_API_ENDPOINT_URL=http://host.docker.internal:11434 + MODEL_ID=llama3.2 + ``` + 3. 
Start the development server: ```bash diff --git a/pages/api/sql-to-human.ts b/pages/api/sql-to-human.ts index 031c355..afb8834 100644 --- a/pages/api/sql-to-human.ts +++ b/pages/api/sql-to-human.ts @@ -19,6 +19,7 @@ export default async function handler( try { const outputText = await translateToHuman( inputText, + process.env.OPENAI_API_ENDPOINT_URL || "https://api.openai.com", process.env.OPENAI_API_KEY ); res.status(200).json({ outputText }); diff --git a/pages/api/translate.ts b/pages/api/translate.ts index d4f1cf3..65bc6b1 100644 --- a/pages/api/translate.ts +++ b/pages/api/translate.ts @@ -15,6 +15,7 @@ export default async function handler( try { const result = await translateToSQL( inputText, + process.env.OPENAI_API_ENDPOINT_URL || "https://api.openai.com", process.env.OPENAI_API_KEY, tableSchema ); diff --git a/src/translateToHuman.js b/src/translateToHuman.js index 8765158..cb36168 100644 --- a/src/translateToHuman.js +++ b/src/translateToHuman.js @@ -1,12 +1,12 @@ import fetch from "isomorphic-unfetch"; -const translateToHuman = async (query, apiKey) => { +const translateToHuman = async (query, apiEndPointURL, apiKey) => { // Validate inputs if (!query || !apiKey) { throw new Error("Missing query or API key."); } - const response = await fetch("https://api.openai.com/v1/completions", { + const response = await fetch(`${apiEndPointURL}/v1/completions`, { method: "POST", headers: { "Content-Type": "application/json", @@ -18,7 +18,7 @@ const translateToHuman = async (query, apiKey) => { max_tokens: 2048, n: 1, stop: "\\n", - model: "text-davinci-003", + model: process.env.MODEL_ID || "text-davinci-003", frequency_penalty: 0.5, presence_penalty: 0.5, logprobs: 10, diff --git a/src/translateToSQL.js b/src/translateToSQL.js index 1d5ab6e..4e414ca 100644 --- a/src/translateToSQL.js +++ b/src/translateToSQL.js @@ -1,6 +1,6 @@ import fetch from "isomorphic-unfetch"; -const translateToSQL = async (query, apiKey, tableSchema = "") => { +const translateToSQL = 
async (query, apiEndPointURL, apiKey, tableSchema = "") => { // Validate inputs if (!query || !apiKey) { @@ -10,7 +10,7 @@ const translateToSQL = async (query, apiKey, tableSchema = "") => { const prompt = `Translate this natural language query into SQL without changing the case of the entries given by me:\n\n"${query}"\n\n${tableSchema ? `Use this table schema:\n\n${tableSchema}\n\n` : ''}SQL Query:`; console.log(prompt); - const response = await fetch("https://api.openai.com/v1/completions", { + const response = await fetch(`${apiEndPointURL}/v1/completions`, { method: "POST", headers: { "Content-Type": "application/json", @@ -22,7 +22,7 @@ const translateToSQL = async (query, apiKey, tableSchema = "") => { max_tokens: 2048, n: 1, stop: "\\n", - model: "gpt-3.5-turbo-instruct", + model: process.env.MODEL_ID || "gpt-3.5-turbo-instruct", frequency_penalty: 0.5, presence_penalty: 0.5, logprobs: 10,