Fix ConfigParser to handle nested text_config structure
- Check both the top-level config and the nested text_config for fields:
  - num_key_value_heads may now be found in text_config
  - num_attention_heads may now be found in text_config
  - num_hidden_layers may now be found in text_config
- Warning checks also look at both levels
This commit is contained in:
parent
b8352ebd17
commit
2a1522fa70
1 changed file with 14 additions and 6 deletions
20
js/app.js
20
js/app.js
|
|
@ -102,13 +102,17 @@
|
|||
|
||||
const textConfig = config.text_config || config;
|
||||
|
||||
// Required fields
|
||||
// Required fields - check both top level and text_config
|
||||
if (config.num_hidden_layers) {
|
||||
result.fields['num-layers'] = config.num_hidden_layers;
|
||||
} else if (textConfig.num_hidden_layers) {
|
||||
result.fields['num-layers'] = textConfig.num_hidden_layers;
|
||||
}
|
||||
|
||||
if (config.num_key_value_heads) {
|
||||
result.fields['kv-heads'] = config.num_key_value_heads;
|
||||
} else if (textConfig.num_key_value_heads) {
|
||||
result.fields['kv-heads'] = textConfig.num_key_value_heads;
|
||||
}
|
||||
|
||||
if (textConfig.head_dim) {
|
||||
|
|
@ -119,6 +123,8 @@
|
|||
|
||||
if (config.num_attention_heads) {
|
||||
result.fields['num-heads'] = config.num_attention_heads;
|
||||
} else if (textConfig.num_attention_heads) {
|
||||
result.fields['num-heads'] = textConfig.num_attention_heads;
|
||||
}
|
||||
|
||||
if (textConfig.max_position_embeddings) {
|
||||
|
|
@ -446,6 +452,9 @@
|
|||
document.getElementById('k-type').value = 'f16';
|
||||
document.getElementById('v-type').value = 'f16';
|
||||
|
||||
// Get text_config if available
|
||||
const textConfig = config.text_config || config;
|
||||
|
||||
// Parse config using ConfigParser
|
||||
const parsed = ConfigParser.parse(config);
|
||||
|
||||
|
|
@ -464,29 +473,28 @@
|
|||
});
|
||||
|
||||
// Show warnings for default values
|
||||
if (config.parallel === undefined) {
|
||||
if (config.parallel === undefined && (!textConfig || textConfig.parallel === undefined)) {
|
||||
showConfigError('parallel', currentLang === 'ru'
|
||||
? 'Не найден параметр parallel. Использовано значение по умолчанию: 1.'
|
||||
: 'parallel not found. Default value used: 1.');
|
||||
}
|
||||
if (config.model_size_gb === undefined) {
|
||||
if (config.model_size_gb === undefined && (!textConfig || textConfig.model_size_gb === undefined)) {
|
||||
showConfigError('model-size', currentLang === 'ru'
|
||||
? 'Не найден параметр model_size_gb. Использовано значение по умолчанию: 0.'
|
||||
: 'model_size_gb not found. Default value used: 0.');
|
||||
}
|
||||
|
||||
// Show errors for missing required fields
|
||||
if (!config.num_hidden_layers) {
|
||||
if (!config.num_hidden_layers && (!textConfig || !textConfig.num_hidden_layers)) {
|
||||
showConfigError('num-layers', currentLang === 'ru'
|
||||
? 'Не найден параметр num_hidden_layers. Важно для расчёта количества слоёв в KV кеше.'
|
||||
: 'num_hidden_layers not found. Important for calculating KV cache layers.');
|
||||
}
|
||||
if (!config.num_key_value_heads) {
|
||||
if (!config.num_key_value_heads && (!textConfig || !textConfig.num_key_value_heads)) {
|
||||
showConfigError('kv-heads', currentLang === 'ru'
|
||||
? 'Не найден параметр num_key_value_heads. Важно для расчёта голов внимания.'
|
||||
: 'num_key_value_heads not found. Important for attention head calculation.');
|
||||
}
|
||||
const textConfig = config.text_config || config;
|
||||
if (!textConfig.head_dim && !(textConfig.hidden_size && textConfig.num_attention_heads)) {
|
||||
showConfigError('head-size', currentLang === 'ru'
|
||||
? 'Не найден параметр head_dim. Важно для размера каждой головы.'
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue