Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: model metadata #271

Merged
merged 1 commit into from
Dec 30, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 11 additions & 7 deletions src/components/auto-tooltip/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -119,13 +119,17 @@ const AutoTooltip: React.FC<AutoTooltipProps> = ({
borderRadius: 12
}}
closeIcon={
<CloseOutlined
style={{
position: 'absolute',
right: 8,
top: 8
}}
/>
tagProps.closable ? (
<CloseOutlined
style={{
position: 'absolute',
right: 8,
top: 8
}}
/>
) : (
false
)
}
>
{children}
Expand Down
1 change: 0 additions & 1 deletion src/components/image-editor/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,6 @@ const CanvasImageEditor: React.FC<CanvasImageEditorProps> = ({
ctx.beginPath();

stroke.forEach((point, i) => {
console.log('Drawing Point:', point);
if (i === 0) {
ctx.moveTo(point.x, point.y);
} else {
Expand Down
1 change: 1 addition & 0 deletions src/config/global.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ declare namespace Global {
label: string;
locale?: boolean;
value: T;
meta?: Record<string, any>;
}

interface HintOptions {
Expand Down
2 changes: 1 addition & 1 deletion src/config/route-cachekey.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
export default {
playgroundTextToImage: '/playground/text-to-image'
'/playground/text-to-image': '/playground/text-to-image'
};
18 changes: 12 additions & 6 deletions src/layouts/index.tsx
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
// @ts-nocheck

import { routeCacheAtom } from '@/atoms/route-cache';
import { routeCacheAtom, setRouteCache } from '@/atoms/route-cache';
import { GPUStackVersionAtom, UpdateCheckAtom, userAtom } from '@/atoms/user';
import ShortCuts, {
modalConfig as ShortCutsConfig
} from '@/components/short-cuts';
import VersionInfo, { modalConfig } from '@/components/version-info';
import routeCachekey from '@/config/route-cachekey';
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import { logout } from '@/pages/login/apis';
import { useAccessMarkedRoutes } from '@@/plugin-access';
Expand Down Expand Up @@ -106,8 +107,6 @@ export default (props: any) => {
const [collapsed, setCollapsed] = useState(false);
const [collapseValue, setCollapseValue] = useState(false);

console.log('routeCache========', routeCache);

const initialInfo = (useModel && useModel('@@initialState')) || {
initialState: undefined,
loading: false,
Expand Down Expand Up @@ -140,10 +139,15 @@ export default (props: any) => {
});
};

// Seed the route-cache entry for a cacheable route on first visit:
// only routes listed in routeCachekey get an entry, initialized to
// `false` (not yet actively cached) so dropRouteCache can tell
// "cacheable but inactive" apart from routes it must never drop.
const initRouteCacheValue = (pathname) => {
  if (routeCache.get(pathname) === undefined && routeCachekey[pathname]) {
    setRouteCache(pathname, false);
  }
};

const dropRouteCache = (pathname) => {
console.log('routeCache.keys()========', routeCache.keys());
for (let key of routeCache.keys()) {
if (key !== pathname && !routeCache.get(key)) {
if (key !== pathname && !routeCache.get(key) && routeCachekey[key]) {
dropByCacheKey(key);
routeCache.delete(key);
}
Expand Down Expand Up @@ -302,6 +306,9 @@ export default (props: any) => {
const { location } = history;
const { pathname } = location;

initRouteCacheValue(pathname);
dropRouteCache(pathname);

// if user is not change password, redirect to change password page
if (
location.pathname !== loginPath &&
Expand All @@ -321,7 +328,6 @@ export default (props: any) => {
: '/playground';
history.push(pathname);
}
dropRouteCache(pathname);
}}
formatMessage={formatMessage}
menu={{
Expand Down
8 changes: 7 additions & 1 deletion src/locales/en-US/playground.ts
Original file line number Diff line number Diff line change
Expand Up @@ -124,5 +124,11 @@ export default {
'playground.params.size.description':
'The maximum size of the generated image is controlled by the deployment parameters of the model. <a href="https://github.com/gpustack/llama-box" target="_blank">Refer to</a>',
'playground.documents.verify.embedding': 'At least add two pieces of text.',
'playground.documents.verify.rerank': 'The documents cannot be empty.'
'playground.documents.verify.rerank': 'The documents cannot be empty.',
'playground.image.guidance.tip':
'The lower the value, the higher the diversity, and the lower the adherence to the prompt.',
'playground.image.cfg_scale.tip':
'The lower the value, the higher the diversity.',
'playground.image.strength.tip':
'The higher the value, the greater the modification to the original image.'
};
5 changes: 4 additions & 1 deletion src/locales/zh-CN/playground.ts
Original file line number Diff line number Diff line change
Expand Up @@ -121,5 +121,8 @@ export default {
'playground.params.size.description':
'图片生成的最大尺寸受控于模型的部署参数。<a href="https://github.com/gpustack/llama-box" target="_blank">参考文档</a>',
'playground.documents.verify.embedding': '至少输入两条文本',
'playground.documents.verify.rerank': '文档不能为空'
'playground.documents.verify.rerank': '文档不能为空',
'playground.image.guidance.tip': '值越低,多样性越高,对提示词的贴合度越低',
'playground.image.cfg_scale.tip': '值越低,多样性越高',
'playground.image.strength.tip': '值越高,它对原图的修改越大'
};
1 change: 0 additions & 1 deletion src/pages/llmodels/components/advance-config.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,6 @@ const AdvanceConfig: React.FC<AdvanceConfigProps> = (props) => {
<Form.Item<FormData> name="categories">
<SealSelect
allowNull
maxCount={1}
label={intl.formatMessage({
id: 'models.form.categories'
})}
Expand Down
56 changes: 34 additions & 22 deletions src/pages/llmodels/components/data-form.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import React, {
useEffect,
useImperativeHandle,
useMemo,
useRef,
useState
} from 'react';
import { queryGPUList } from '../apis';
Expand Down Expand Up @@ -43,6 +44,30 @@ const SEARCH_SOURCE = [
modelSourceMap.modelscope_value
];

// Model-source choices for the deployment form. Defined at module scope
// (outside the component), so labels cannot be translated here: entries
// whose label is an i18n key carry `locale: true` and are translated by
// the consuming renderer (see Global option type's optional `locale` flag).
const sourceOptions = [
  {
    label: 'Hugging Face',
    value: modelSourceMap.huggingface_value,
    key: 'huggingface'
  },
  {
    label: 'Ollama Library',
    value: modelSourceMap.ollama_library_value,
    key: 'ollama_library'
  },
  {
    label: 'ModelScope',
    value: modelSourceMap.modelscope_value,
    key: 'model_scope'
  },
  {
    // i18n key, not display text — `locale: true` tells the renderer to translate
    label: 'models.form.localPath',
    locale: true,
    value: modelSourceMap.local_path_value,
    key: 'local_path'
  }
];

const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
const { action, isGGUF, onOk } = props;
const [form] = Form.useForm();
Expand All @@ -57,28 +82,7 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
speech2text: false
});

const sourceOptions = [
{
label: 'Hugging Face',
value: modelSourceMap.huggingface_value,
key: 'huggingface'
},
{
label: 'Ollama Library',
value: modelSourceMap.ollama_library_value,
key: 'ollama_library'
},
{
label: 'ModelScope',
value: modelSourceMap.modelscope_value,
key: 'model_scope'
},
{
label: intl.formatMessage({ id: 'models.form.localPath' }),
value: modelSourceMap.local_path_value,
key: 'local_path'
}
];
const localPathCache = useRef<string>('');

const getGPUList = async () => {
const data = await queryGPUList();
Expand Down Expand Up @@ -186,8 +190,15 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
}
};

// Snapshot the current local_path when the input gains focus so the blur
// handler can detect whether the user actually changed the value.
const handleOnFocus = () => {
  localPathCache.current = form.getFieldValue('local_path');
};

const handleLocalPathBlur = (e: any) => {
const value = e.target.value;
if (value === localPathCache.current && value) {
return;
}
const isEndwithGGUF = _.endsWith(value, '.gguf');
let backend = backendOptionsMap.llamaBox;
if (!isEndwithGGUF) {
Expand Down Expand Up @@ -344,6 +355,7 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
>
<SealInput.Input
onBlur={handleLocalPathBlur}
onFocus={handleOnFocus}
label={intl.formatMessage({ id: 'models.form.filePath' })}
required
></SealInput.Input>
Expand Down
1 change: 1 addition & 0 deletions src/pages/llmodels/components/instance-item.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ const InstanceItem: React.FC<InstanceItemProps> = ({
InstanceStatusMap.Initializing,
InstanceStatusMap.Running,
InstanceStatusMap.Error,
InstanceStatusMap.Starting,
InstanceStatusMap.Downloading
],
icon: <IconFont type="icon-logs" />
Expand Down
96 changes: 62 additions & 34 deletions src/pages/llmodels/components/update-modal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,14 @@ import { PageActionType } from '@/config/types';
import { useIntl } from '@umijs/max';
import { Form, Modal, Tooltip, Typography } from 'antd';
import _ from 'lodash';
import React, { memo, useCallback, useEffect, useMemo, useState } from 'react';
import React, {
memo,
useCallback,
useEffect,
useMemo,
useRef,
useState
} from 'react';
import SimpleBar from 'simplebar-react';
import 'simplebar-react/dist/simplebar.min.css';
import { queryGPUList } from '../apis';
Expand All @@ -35,13 +42,37 @@ const SEARCH_SOURCE = [
modelSourceMap.modelscope_value
];

// Model-source choices for the update modal. Defined at module scope, so
// labels cannot be translated here: entries whose label is an i18n key
// carry `locale: true` and are translated by the consuming renderer
// (see the optional `locale` flag on the Global option type).
const sourceOptions = [
  {
    label: 'Hugging Face',
    value: modelSourceMap.huggingface_value,
    key: 'huggingface'
  },
  {
    label: 'Ollama Library',
    value: modelSourceMap.ollama_library_value,
    key: 'ollama_library'
  },
  {
    label: 'ModelScope',
    value: modelSourceMap.modelscope_value,
    key: 'model_scope'
  },
  {
    // i18n key, not display text — without `locale: true` the raw key
    // 'models.form.localPath' would render untranslated. Keeps this array
    // consistent with the identical sourceOptions in data-form.tsx.
    label: 'models.form.localPath',
    locale: true,
    value: modelSourceMap.local_path_value,
    key: 'local_path'
  }
];

const UpdateModal: React.FC<AddModalProps> = (props) => {
const { title, action, open, onOk, onCancel } = props || {};
const [form] = Form.useForm();
const intl = useIntl();
const [gpuOptions, setGpuOptions] = useState<any[]>([]);
const [isGGUF, setIsGGUF] = useState<boolean>(false);
const [loading, setLoading] = useState(false);
const localPathCache = useRef<string>('');

const getGPUList = async () => {
const data = await queryGPUList();
Expand All @@ -57,29 +88,6 @@ const UpdateModal: React.FC<AddModalProps> = (props) => {
setGpuOptions(list);
};

const sourceOptions = [
{
label: 'Hugging Face',
value: modelSourceMap.huggingface_value,
key: 'huggingface'
},
{
label: 'Ollama Library',
value: modelSourceMap.ollama_library_value,
key: 'ollama_library'
},
{
label: 'ModelScope',
value: modelSourceMap.modelscope_value,
key: 'model_scope'
},
{
label: intl.formatMessage({ id: 'models.form.localPath' }),
value: modelSourceMap.local_path_value,
key: 'local_path'
}
];

useEffect(() => {
if (action === PageAction.EDIT && open) {
const result = setSourceRepoConfigValue(
Expand All @@ -106,6 +114,34 @@ const UpdateModal: React.FC<AddModalProps> = (props) => {
setIsGGUF(props.data?.backend === backendOptionsMap.llamaBox);
}, [props.data?.backend]);

// Backend-switch side effects: selecting llama-box re-enables its defaults
// (distributed inference across workers + CPU offloading); any backend
// change clears the pinned backend_version so a stale version string from
// the previous backend isn't submitted.
// NOTE(review): deps array is empty but the closure reads `form` — safe
// only because Form.useForm() returns a referentially stable instance;
// confirm no other captured value can go stale.
const handleBackendChange = useCallback((val: string) => {
  if (val === backendOptionsMap.llamaBox) {
    form.setFieldsValue({
      distributed_inference_across_workers: true,
      cpu_offloading: true
    });
  }
  form.setFieldValue('backend_version', '');
}, []);

// Snapshot the current local_path when the input gains focus so the blur
// handler can detect whether the user actually changed the value.
const handleOnFocus = () => {
  localPathCache.current = form.getFieldValue('local_path');
};

// On blur of the local-path input, infer the backend from the file
// extension and push it into the form: `.gguf` files run on llama-box,
// anything else falls back to vLLM.
const handleLocalPathBlur = (e: any) => {
  const value = e.target.value;
  // Skip when the (non-empty) path is unchanged since focus — avoids
  // clobbering a backend the user may have selected manually.
  if (value === localPathCache.current && value) {
    return;
  }
  const backend = _.endsWith(value, '.gguf')
    ? backendOptionsMap.llamaBox
    : backendOptionsMap.vllm;
  // handleBackendChange is a const defined above — plain call, no `?.` needed
  handleBackendChange(backend);
  form.setFieldValue('backend', backend);
};

const renderHuggingfaceFields = () => {
return (
<>
Expand Down Expand Up @@ -250,6 +286,8 @@ const UpdateModal: React.FC<AddModalProps> = (props) => {
]}
>
<SealInput.Input
onBlur={handleLocalPathBlur}
onFocus={handleOnFocus}
disabled={false}
label={intl.formatMessage({ id: 'models.form.filePath' })}
required
Expand Down Expand Up @@ -283,16 +321,6 @@ const UpdateModal: React.FC<AddModalProps> = (props) => {
form.submit();
};

const handleBackendChange = useCallback((val: string) => {
if (val === backendOptionsMap.llamaBox) {
form.setFieldsValue({
distributed_inference_across_workers: true,
cpu_offloading: true
});
}
form.setFieldValue('backend_version', '');
}, []);

const handleOk = (formdata: FormData) => {
let obj = {};
if (formdata.backend === backendOptionsMap.vllm) {
Expand Down
Loading