Skip to content

Commit

Permalink
Send initialization arguments only when stored in localStorage (All-Hands-AI#618)
Browse files Browse the repository at this point in the history

* Send initialization arguments only when stored in localStorage

* Fix mock default-model path
  • Loading branch information
yimothysu committed Apr 3, 2024
1 parent 310cd70 commit 0c00e84
Show file tree
Hide file tree
Showing 7 changed files with 40 additions and 5 deletions.
4 changes: 2 additions & 2 deletions frontend/.eslintrc
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@
{
"files": ["*.ts", "*.tsx"],
"rules": {
// Allow state modification in Redux reducers
// Allow state modification in reduce and Redux reducers
"no-param-reassign": ["error", {
"props": true,
"ignorePropertyModificationsFor": [
"state"
"acc", "state"
]
}],
// For https://stackoverflow.com/questions/55844608/stuck-with-eslint-error-i-e-separately-loops-should-be-avoided-in-favor-of-arra
Expand Down
9 changes: 8 additions & 1 deletion frontend/src/components/SettingModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import {
fetchAgents,
INITIAL_MODELS,
sendSettings,
getInitialModel,
} from "../services/settingsService";
import {
setModel,
Expand Down Expand Up @@ -54,6 +55,12 @@ function SettingModal({ isOpen, onClose }: Props): JSX.Element {
);

useEffect(() => {
async function setInitialModel() {
const initialModel = await getInitialModel();
store.dispatch(setModel(initialModel));
}
setInitialModel();

fetchModels().then((fetchedModels) => {
setSupportedModels(fetchedModels);
localStorage.setItem("supportedModels", JSON.stringify(fetchedModels));
Expand Down Expand Up @@ -100,7 +107,7 @@ function SettingModal({ isOpen, onClose }: Props): JSX.Element {
}))}
label="Model"
placeholder="Select a model"
defaultSelectedKey={model}
selectedKey={model}
// className="max-w-xs"
onSelectionChange={(key) => {
store.dispatch(setModel(key as string));
Expand Down
9 changes: 9 additions & 0 deletions frontend/src/services/settingsService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,15 @@ import { appendAssistantMessage } from "../state/chatSlice";
import { setInitialized } from "../state/taskSlice";
import store from "../store";

/**
 * Resolve the model to initialize the UI with.
 *
 * Prefers the user's persisted choice in localStorage; when no value has
 * been stored, falls back to asking the backend for its configured
 * default model.
 *
 * @returns the model identifier — the stored value, or the JSON-decoded
 *          response from `/api/default-model`.
 */
export async function getInitialModel(): Promise<string> {
  // Read once and reuse: avoids a redundant second localStorage lookup.
  const storedModel = localStorage.getItem("model");
  if (storedModel) {
    return storedModel;
  }

  const res = await fetch("/api/default-model");
  return res.json();
}

export async function fetchModels() {
const response = await fetch(`/api/litellm-models`);
return response.json();
Expand Down
12 changes: 11 additions & 1 deletion frontend/src/socket/socket.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,17 @@ const WS_URL = `ws://${window.location.host}/ws`;
const socket = new WebSocket(WS_URL);

socket.addEventListener("open", () => {
const { settings } = store.getState();
const settingKeys = ["model", "agent", "workspaceDirectory"];
const settings = settingKeys.reduce(
(acc, key) => {
const value = localStorage.getItem(key);
if (value) {
acc[key] = value;
}
return acc;
},
{} as Record<string, string>,
);
sendSettings(socket, settings, false);
});
socket.addEventListener("message", (event) => {
Expand Down
2 changes: 1 addition & 1 deletion frontend/src/state/settingsSlice.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { createSlice } from "@reduxjs/toolkit";
export const settingsSlice = createSlice({
name: "settings",
initialState: {
model: localStorage.getItem("model") || "gpt-4-0125-preview",
model: localStorage.getItem("model") || "",
agent: localStorage.getItem("agent") || "MonologueAgent",
workspaceDirectory:
localStorage.getItem("workspaceDirectory") || "./workspace",
Expand Down
4 changes: 4 additions & 0 deletions opendevin/mock/listen.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,5 +42,9 @@ def read_llm_agents():
"PlannerAgent",
]

@app.get("/default-model")
def read_default_model():
    """Mock endpoint: always returns a fixed default LLM model name."""
    return "gpt-4"

# Run the mock API server standalone on localhost:3000.
if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=3000)
5 changes: 5 additions & 0 deletions opendevin/server/listen.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import agenthub # noqa F401 (we import this to get the agents registered)
import litellm
from opendevin.agent import Agent
from opendevin import config

app = FastAPI()

Expand Down Expand Up @@ -36,3 +37,7 @@ async def get_litellm_agents():
Get all agents supported by LiteLLM.
"""
return Agent.listAgents()

@app.get("/default-model")
def read_default_model():
    """Return the server's configured default LLM model.

    Delegates to config.get_or_error("LLM_MODEL") — presumably raises when
    LLM_MODEL is not configured (TODO: confirm get_or_error semantics).
    """
    return config.get_or_error("LLM_MODEL")

0 comments on commit 0c00e84

Please sign in to comment.