Skip to content

server : (webui) let server send locally-defined default webui settings #14468

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion common/arg.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3465,6 +3465,14 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
}
).set_examples({LLAMA_EXAMPLE_SERVER}));

add_opt(common_arg(
{"--default-client-config"}, "JSON_FNAME",
string_format("JSON file containing the default client config"),
[](common_params & params, const std::string & value) {
params.public_default_client_config = value;
}
).set_examples({LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_DEFAULT_CLIENT_CONFIG"));

add_opt(common_arg(
{ "--diffusion-steps" }, "N",
string_format("number of diffusion steps (default: %d)", params.diffusion.steps),
Expand Down Expand Up @@ -3510,6 +3518,5 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
[](common_params & params, const std::string & value) { params.diffusion.add_gumbel_noise = std::stof(value); }
).set_examples({ LLAMA_EXAMPLE_DIFFUSION }));


return ctx_arg;
}
1 change: 1 addition & 0 deletions common/common.h
Original file line number Diff line number Diff line change
Expand Up @@ -390,6 +390,7 @@ struct common_params {

std::string hostname = "127.0.0.1";
std::string public_path = ""; // NOLINT
std::string public_default_client_config = ""; // NOLINT
std::string api_prefix = ""; // NOLINT
std::string chat_template = ""; // NOLINT
bool use_jinja = false; // NOLINT
Expand Down
Binary file modified tools/server/public/index.html.gz
Binary file not shown.
12 changes: 12 additions & 0 deletions tools/server/server.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
#include <cstddef>
#include <cinttypes>
#include <deque>
#include <fstream>
#include <memory>
#include <mutex>
#include <signal.h>
Expand Down Expand Up @@ -1945,6 +1946,7 @@ struct server_context {
// slots / clients
std::vector<server_slot> slots;
json default_generation_settings_for_props;
json default_client_config = json::object();

server_queue queue_tasks;
server_response queue_results;
Expand Down Expand Up @@ -2138,6 +2140,15 @@ struct server_context {

default_generation_settings_for_props = slots[0].to_json();

if (!params_base.public_default_client_config.empty()) {
std::ifstream file(params_base.public_default_client_config);
LOG_INF("%s: Loading default client config from %s\n", __func__, params_base.public_default_client_config.c_str());
if (!file.is_open()) {
throw std::runtime_error("Error: default client config file not open");
}
file >> default_client_config;
}

// the update_slots() logic will always submit a maximum of n_batch or n_parallel tokens
// note that n_batch can be > n_ctx (e.g. for non-causal attention models such as BERT where the KV cache is not used)
{
Expand Down Expand Up @@ -4157,6 +4168,7 @@ int main(int argc, char ** argv) {
{ "bos_token", common_token_to_piece(ctx_server.ctx, llama_vocab_bos(ctx_server.vocab), /* special= */ true)},
{ "eos_token", common_token_to_piece(ctx_server.ctx, llama_vocab_eos(ctx_server.vocab), /* special= */ true)},
{ "build_info", build_info },
{ "default_client_config", ctx_server.default_client_config },
};
if (ctx_server.params_base.use_jinja) {
if (auto tool_use_src = common_chat_templates_source(ctx_server.chat_templates.get(), "tool_use")) {
Expand Down
23 changes: 21 additions & 2 deletions tools/server/webui/src/components/SettingDialog.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { useState } from 'react';
import { useState, useEffect } from 'react';
import { useAppContext } from '../utils/app.context';
import { CONFIG_DEFAULT, CONFIG_INFO } from '../Config';
import { isDev } from '../Config';
Expand Down Expand Up @@ -276,7 +276,7 @@ export default function SettingDialog({
show: boolean;
onClose: () => void;
}) {
const { config, saveConfig } = useAppContext();
const { config, saveConfig, serverProps } = useAppContext();
const [sectionIdx, setSectionIdx] = useState(0);

// clone the config object to prevent direct mutation
Expand All @@ -285,6 +285,25 @@ export default function SettingDialog({
);
const { showConfirm, showAlert } = useModals();

// get default client settings
useEffect(() => {
if (
serverProps &&
serverProps.default_client_config &&
Object.keys(serverProps.default_client_config).length > 0
) {
if (StorageUtils.setDefaultConfig(serverProps.default_client_config)) {
console.log(
'Setting default config:',
serverProps.default_client_config
);
const newConfig = StorageUtils.getConfig();
saveConfig(newConfig);
setLocalConfig(JSON.parse(JSON.stringify(newConfig)));
}
}
}, [serverProps, saveConfig]);

const resetConfig = async () => {
if (await showConfirm('Are you sure you want to reset all settings?')) {
setLocalConfig(CONFIG_DEFAULT);
Expand Down
14 changes: 14 additions & 0 deletions tools/server/webui/src/utils/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,20 @@ const StorageUtils = {
localStorage.setItem('theme', theme);
}
},
setDefaultConfig(defaultConfig: object | null | undefined): boolean {
  // Seed localStorage with the server-provided default client config,
  // but only when the user has not saved any config yet — an existing
  // user config is never overwritten.
  // @param defaultConfig - config object received from the server's /props
  // @returns true when the default was written, false otherwise
  if (defaultConfig == null) {
    // Guard against null/undefined: JSON.stringify would otherwise store
    // the literal string "null"/"undefined" under the 'config' key.
    return false;
  }
  if (localStorage.getItem('config') !== null) {
    // User already has a saved config — leave it untouched.
    // NOTE: localStorage is synchronous, so a single check suffices;
    // no re-check is needed before the write.
    return false;
  }
  try {
    localStorage.setItem('config', JSON.stringify(defaultConfig));
    return true;
  } catch (e) {
    // e.g. quota exceeded or storage disabled by the browser
    console.error(e);
    return false;
  }
},
};

export default StorageUtils;
Expand Down
1 change: 1 addition & 0 deletions tools/server/webui/src/utils/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -134,5 +134,6 @@ export interface LlamaCppServerProps {
vision: boolean;
audio: boolean;
};
default_client_config: Record<string, number | string | boolean>;
// TODO: support params
}
Loading