Skip to content

Commit

Permalink
ProductivitySuite UI: Add multiple models support in ChatQnA. Choose different models from dropdown
Browse files Browse the repository at this point in the history
  • Loading branch information
sgurunat committed Oct 29, 2024
1 parent debdd0f commit 0a5584a
Show file tree
Hide file tree
Showing 3 changed files with 102 additions and 7 deletions.
Original file line number Diff line number Diff line change
@@ -1,25 +1,92 @@
import { NumberInput, Slider, Text, Title } from "@mantine/core"
import { NumberInput, Select, Slider, Text, Title } from "@mantine/core"
import { useAppDispatch, useAppSelector } from "../../redux/store"
import { conversationSelector, setTemperature, setToken } from "../../redux/Conversation/ConversationSlice"

import { conversationSelector, setTemperature, setToken, setModel, setMinToken, setMaxToken, setModels} from "../../redux/Conversation/ConversationSlice"
import { useEffect } from "react";


function Settings() {
const { token,maxTemperature, minTemperature, maxToken, minToken, temperature} = useAppSelector(conversationSelector)
const { token, maxTemperature, minTemperature, maxToken, minToken, temperature, models, model } = useAppSelector(conversationSelector)
const dispatch = useAppDispatch();


const modelOptions = models.map(model => ({
value: model.model_name,
label: model.displayName,
minToken: model.minToken,
maxToken: model.maxToken,
}));

const onModelChange = (value: string | null) => {
if (value) {
const selectedModel = models.find(m => m.model_name === value);
if (selectedModel) {
dispatch(setModel(value));
dispatch(setTemperature(0.4)); // Assuming you want to reset to a default value
dispatch(setToken(selectedModel.minToken));
dispatch(setMinToken(selectedModel.minToken));
dispatch(setMaxToken(selectedModel.maxToken));
// You might also want to update the min and max token values in the redux state here
}
}
};
const onTemperatureChange = (value: number) => {
dispatch(setTemperature(value))
}
const onTokenChange = (value: number | string) => {
dispatch(setToken(Number(value)))
}

const callFunctions = async () => {
try {
const response = await fetch('/model_configs.json');
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const model_configs = await response.json();
// Check if the array is empty
if (model_configs.length === 0) {
throw new Error('The model_configs.json file contains an empty array.');
}
// Validate that each object contains the required fields with non-empty values
const requiredFields = ['model_name', 'displayName', 'endpoint', 'minToken', 'maxToken'];
for (const config of model_configs) {
for (const field of requiredFields) {
if (!(field in config) || config[field] === '') {
throw new Error(`One or more configurations are missing the required field '${field}' or the field is empty.`);
}
}
}
// After validation, update the state with the new configs
dispatch(setModels(model_configs));
dispatch(setMinToken(model_configs[0].minToken));
dispatch(setMaxToken(model_configs[0].maxToken));
dispatch(setModel(model_configs[0].model_name));
} catch (error) {
console.warn('model_configs.json not found, using default configuration.', error);
// If the fetch fails, the state will remain with the default values
}
}

useEffect(() => {
callFunctions()
}, [])

return (
<>

<div>
<Title order={4}>Settings</Title>
</div>
{models.length > 0 && (
<div>
<Select
label="Model"
placeholder="Pick a model"
value={model}
onChange={onModelChange}
data={modelOptions}
/>
</div>
)}
<div>
<Text>Temperature</Text>
<Slider
Expand All @@ -41,7 +108,7 @@ function Settings() {
/>
</div>
</>

)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,21 @@ type file = {
name: string;
};

/**
 * Configuration entry for a selectable LLM, as loaded from
 * `/model_configs.json` by the Settings component.
 */
export type Model = {
  /** Identifier sent to the backend (e.g. "Intel/neural-chat-7b-v3-3"). */
  model_name: string;
  /** Human-readable label shown in the model dropdown. */
  displayName: string;
  /** Lower bound applied to the token setting when this model is selected. */
  minToken: number;
  /** Upper bound applied to the token setting when this model is selected. */
  maxToken: number;
};

export interface ConversationReducer {
selectedConversationId: string;
conversations: Conversation[];
selectedConversationHistory: Message[];
onGoingResult: string;
filesInDataSource: file[];
systemPrompt: string;
models: Model[];
model: string;
minToken: number;
maxToken: number;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,15 @@ import {
CHAT_HISTORY_GET,
CHAT_HISTORY_DELETE,
} from "../../config";
import { Model } from './Conversation';

const initialState: ConversationReducer = {
conversations: [],
selectedConversationId: "",
selectedConversationHistory: [],
onGoingResult: "",
filesInDataSource: [],
models: [],
model: "Intel/neural-chat-7b-v3-3",
systemPrompt: "You are helpful assistant",
minToken: 100,
Expand Down Expand Up @@ -68,6 +70,18 @@ export const ConversationSlice = createSlice({
setSystemPrompt: (state, action: PayloadAction<string>) => {
state.systemPrompt = action.payload;
},
// Select the active model by its backend identifier (model_name).
setModel: (state, action: PayloadAction<string>) => {
state.model = action.payload;
},
// Set the lower bound of the allowed token range (per selected model's config).
setMinToken: (state, action: PayloadAction<number>) => {
state.minToken = action.payload;
},
// Set the upper bound of the allowed token range (per selected model's config).
setMaxToken: (state, action: PayloadAction<number>) => {
state.maxToken = action.payload;
},
// Replace the list of available models, typically after fetching
// and validating /model_configs.json at startup.
setModels: (state, action: PayloadAction<Model []>) => {
state.models = action.payload;
}
},
extraReducers(builder) {
builder.addCase(uploadFile.fulfilled, () => {
Expand Down Expand Up @@ -231,7 +245,9 @@ export const deleteConversation = createAsyncThunkWrapper(

export const doConversation = (conversationRequest: ConversationRequest) => {
const { conversationId, userPrompt, messages, model, token, temperature } = conversationRequest;
store.dispatch(addMessageToMessages(messages[0]));
if(messages.length==1){
store.dispatch(addMessageToMessages(messages[0]));
}
store.dispatch(addMessageToMessages(userPrompt));
const userPromptWithoutTime = {
role: userPrompt.role,
Expand Down Expand Up @@ -321,6 +337,10 @@ export const {
setTemperature,
setToken,
setSystemPrompt,
setModel,
setMinToken,
setMaxToken,
setModels
} = ConversationSlice.actions;
export const conversationSelector = (state: RootState) => state.conversationReducer;
export default ConversationSlice.reducer;

0 comments on commit 0a5584a

Please sign in to comment.