diff --git a/README.md b/README.md
index bf261d0..44df1df 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,7 @@ While this project is still under development, a substantial part of the code is
 + [Gemini API](https://ai.google.dev/gemini-api/docs)
 + [Groq API](https://console.groq.com)
 + Local and offline serving via [ollama](https://github.com/ollama/ollama)
++ [GitHub Models](https://github.com/marketplace/models)
 - Flexible API credential management from various sources:
 + Environment variables
 + Bash commands
@@ -181,6 +182,9 @@ The minimal requirement is to at least set up one provider, hence one from the s
       openai = {
         api_key = os.getenv "OPENAI_API_KEY",
       },
+      github = {
+        api_key = os.getenv "GITHUB_TOKEN",
+      },
     },
   }
 end,
diff --git a/lua/parrot/config.lua b/lua/parrot/config.lua
index b2bf3f0..e992be1 100644
--- a/lua/parrot/config.lua
+++ b/lua/parrot/config.lua
@@ -133,6 +133,19 @@ local defaults = {
         command = { temperature = 1.5, top_p = 1 },
       },
     },
+    github = {
+      api_key = "",
+      endpoint = "https://models.inference.ai.azure.com/chat/completions",
+      topic_prompt = topic_prompt,
+      topic = {
+        model = "gpt-4o-mini",
+        params = {},
+      },
+      params = {
+        chat = { temperature = 1.5, top_p = 1 },
+        command = { temperature = 1.5, top_p = 1 },
+      },
+    },
   },
   cmd_prefix = "Prt",
   curl_params = {},
diff --git a/lua/parrot/provider/github.lua b/lua/parrot/provider/github.lua
new file mode 100644
index 0000000..d50ab8e
--- /dev/null
+++ b/lua/parrot/provider/github.lua
@@ -0,0 +1,66 @@
+local OpenAI = require("parrot.provider.openai")
+local utils = require("parrot.utils")
+
+-- GitHub Models provider: delegates to the OpenAI provider implementation
+-- (same chat-completions wire format) via prototype inheritance.
+local GitHub = setmetatable({}, { __index = OpenAI })
+GitHub.__index = GitHub
+
+-- Available API parameters for GitHub models
+local AVAILABLE_API_PARAMETERS = {
+  -- required
+  messages = true,
+  model = true,
+  -- optional
+  max_tokens = true,
+  temperature = true,
+  top_p = true,
+  stop = true,
+  best_of = true,
+  presence_penalty = true,
+  stream = true,
+}
+
+-- Creates a new GitHub provider instance built on the OpenAI base.
+---@param endpoint string
+---@param api_key string|table
+function GitHub:new(endpoint, api_key)
+  local instance = OpenAI.new(self, endpoint, api_key)
+  instance.name = "github"
+  return setmetatable(instance, self)
+end
+
+-- Preprocesses the payload before sending to the API: trims surrounding
+-- whitespace from each message and drops unsupported parameters.
+---@param payload table
+---@return table
+function GitHub:preprocess_payload(payload)
+  for _, message in ipairs(payload.messages) do
+    message.content = message.content:gsub("^%s*(.-)%s*$", "%1")
+  end
+  return utils.filter_payload_parameters(AVAILABLE_API_PARAMETERS, payload)
+end
+
+-- Returns the list of available models
+---@param online boolean
+---@return string[]
+function GitHub:get_available_models(online)
+  return {
+    "AI21-Jamba-Instruct",
+    "Cohere-command-r",
+    "Cohere-command-r-plus",
+    "Meta-Llama-3-70B-Instruct",
+    "Meta-Llama-3-8B-Instruct",
+    "Meta-Llama-3.1-405B-Instruct",
+    "Meta-Llama-3.1-70B-Instruct",
+    "Meta-Llama-3.1-8B-Instruct",
+    "Mistral-small",
+    "Mistral-Nemo",
+    "Mistral-large-2407",
+    "Mistral-large",
+    "gpt-4o-mini",
+    "gpt-4o",
+    "Phi-3-medium-128k-instruct",
+    "Phi-3-medium-4k-instruct",
+    "Phi-3-mini-128k-instruct",
+    "Phi-3-mini-4k-instruct",
+    "Phi-3-small-128k-instruct",
+    "Phi-3-small-8k-instruct",
+  }
+end
+
+return GitHub
diff --git a/lua/parrot/provider/init.lua b/lua/parrot/provider/init.lua
index d0218f1..73ae26e 100644
--- a/lua/parrot/provider/init.lua
+++ b/lua/parrot/provider/init.lua
@@ -5,6 +5,7 @@ local Mistral = require("parrot.provider.mistral")
 local Ollama = require("parrot.provider.ollama")
 local OpenAI = require("parrot.provider.openai")
 local Perplexity = require("parrot.provider.perplexity")
+local GitHub = require("parrot.provider.github")
 local logger = require("parrot.logger")
 
 local M = {}
@@ -17,6 +18,7 @@ M.init_provider = function(prov_name, endpoint, api_key)
   local providers = {
     anthropic = Anthropic,
     gemini = Gemini,
+    github = GitHub,
     groq = Groq,
     mistral = Mistral,
     ollama = Ollama,