Skip to content

Commit

Permalink
Passthrough agentModel for LMStudio (#2499)
Browse files · Browse the repository at this point in the history
  • Loading branch information
timothycarambat authored Oct 18, 2024
1 parent ab6f03c commit 7342839
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 5 deletions.
7 changes: 5 additions & 2 deletions server/utils/AiProviders/lmStudio/index.js
Original file line number | Diff line number | Diff line change
Expand Up @@ -5,7 +5,7 @@ const {

// hybrid of openAi LLM chat completion for LMStudio
class LMStudioLLM {
constructor(embedder = null, _modelPreference = null) {
constructor(embedder = null, modelPreference = null) {
if (!process.env.LMSTUDIO_BASE_PATH)
throw new Error("No LMStudio API Base Path was set.");

Expand All @@ -21,7 +21,10 @@ class LMStudioLLM {
// and any other value will crash inferencing. So until this is patched we will
// try to fetch the `/models` and have the user set it, or just fallback to "Loaded from Chat UI"
// which will not impact users with <v0.2.17 and should work as well once the bug is fixed.
this.model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
this.model =
modelPreference ||
process.env.LMSTUDIO_MODEL_PREF ||
"Loaded from Chat UI";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
Expand Down
2 changes: 1 addition & 1 deletion server/utils/agents/aibitat/index.js
Original file line number | Diff line number | Diff line change
Expand Up @@ -756,7 +756,7 @@ ${this.getHistory({ to: route.to })
case "anthropic":
return new Providers.AnthropicProvider({ model: config.model });
case "lmstudio":
return new Providers.LMStudioProvider({});
return new Providers.LMStudioProvider({ model: config.model });
case "ollama":
return new Providers.OllamaProvider({ model: config.model });
case "groq":
Expand Down
9 changes: 7 additions & 2 deletions server/utils/agents/aibitat/providers/lmstudio.js
Original file line number | Diff line number | Diff line change
Expand Up @@ -9,9 +9,14 @@ const UnTooled = require("./helpers/untooled.js");
class LMStudioProvider extends InheritMultiple([Provider, UnTooled]) {
model;

constructor(_config = {}) {
/**
*
* @param {{model?: string}} config
*/
constructor(config = {}) {
super();
const model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
const model =
config?.model || process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
const client = new OpenAI({
baseURL: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
apiKey: null,
Expand Down

0 comments on commit 7342839

Please sign in to comment.