chore: sync

Author: lencx
Date: 2022-12-23 15:27:05 +08:00
parent 2be560e69a
commit 389e00a5e0
17 changed files with 260 additions and 252 deletions

src/hooks/useChatModel.ts

@@ -2,46 +2,52 @@ import { useState, useEffect } from 'react';
 import { clone } from 'lodash';
 import { invoke } from '@tauri-apps/api';
-import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils';
+import { CHAT_MODEL_JSON, CHAT_MODEL_CMD_JSON, readJSON, writeJSON } from '@/utils';
 import useInit from '@/hooks/useInit';

 export default function useChatModel(key: string, file = CHAT_MODEL_JSON) {
-  const [modelJson, setModelJson] = useState<Record<string, any>>([]);
+  const [modelJson, setModelJson] = useState<Record<string, any>>({});

   useInit(async () => {
     const data = await readJSON(file, {
-      defaultVal: { name: 'ChatGPT Model', [key]: [] },
+      defaultVal: { name: 'ChatGPT Model', [key]: null },
     });
     setModelJson(data);
   });

-  const modelSet = async (data: Record<string, any>[]) => {
+  const modelSet = async (data: Record<string, any>[]|Record<string, any>) => {
     const oData = clone(modelJson);
     oData[key] = data;
     await writeJSON(file, oData);
     await invoke('window_reload', { label: 'core' });
     setModelJson(oData);
   }

   return { modelJson, modelSet, modelData: modelJson?.[key] || [] };
 }

-export function useCacheModel(file: string) {
-  const [modelJson, setModelJson] = useState<Record<string, any>[]>([]);
+export function useCacheModel(file = '') {
+  const [modelCacheJson, setModelCacheJson] = useState<Record<string, any>[]>([]);

   useEffect(() => {
     if (!file) return;
     (async () => {
-      const data = await readJSON(file, { isRoot: true });
-      setModelJson(data);
+      const data = await readJSON(file, { isRoot: true, isList: true });
+      setModelCacheJson(data);
     })();
   }, [file]);

-  const modelSet = async (data: Record<string, any>[]) => {
-    await writeJSON(file, data, { isRoot: true });
-    await invoke('window_reload', { label: 'core' });
-    setModelJson(data);
+  const modelCacheSet = async (data: Record<string, any>[], newFile = '') => {
+    await writeJSON(newFile ? newFile : file, data, { isRoot: true });
+    setModelCacheJson(data);
+    await modelCacheCmd();
   }

-  return { modelJson, modelSet };
+  const modelCacheCmd = async () => {
+    // Generate the `chat.model.cmd.json` file and refresh the page for the slash command to take effect.
+    const list = await invoke('cmd_list');
+    await writeJSON(CHAT_MODEL_CMD_JSON, { name: 'ChatGPT CMD', last_updated: Date.now(), data: list });
+    await invoke('window_reload', { label: 'core' });
+  };
+
+  return { modelCacheJson, modelCacheSet, modelCacheCmd };
 }
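For context, a minimal sketch of how a caller might consume the reworked hook, assuming only the API above; the SyncCustom component, the 'chat.model.json' path, and the row shape are illustrative, not part of this commit:

// Hypothetical consumer of useCacheModel; a sketch, not code from this commit.
import { useCacheModel } from '@/hooks/useChatModel';

export default function SyncCustom() {
  // 'chat.model.json' is an assumed cache path; passing '' skips the initial read.
  const { modelCacheJson, modelCacheSet } = useCacheModel('chat.model.json');

  const handleSave = async () => {
    // modelCacheSet persists the rows, then modelCacheCmd() regenerates
    // chat.model.cmd.json and reloads the `core` window.
    await modelCacheSet([...modelCacheJson, { cmd: '/new', enabled: true }]);
  };

  return <button onClick={handleSave}>Save</button>;
}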

src/hooks/useData.ts

@@ -17,6 +17,9 @@ export default function useData(oData: any[]) {
   };

   const opInit = (val: any[] = []) => {
+    if (!val || !Array.isArray(val)) return;
+    console.log('«20» /src/hooks/useData.ts ~> ', val);
+
     const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
     setData(nData);
   };
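Aside from the new guard and debug log, the point of `opInit` is to stamp each incoming row with a fresh UUID under `safeKey` before it reaches state. A standalone sketch of that transform, where the '__key' name is an assumption (the real constant lives in useData's module scope):

import { v4 } from 'uuid';

const safeKey = '__key'; // assumed key name, for illustration only
const rows = [{ cmd: '/hello' }, { cmd: '/bye' }];
// The spread comes after the generated id, so a row that already
// carries a [safeKey] field keeps its own value.
const tagged = rows.map(i => ({ [safeKey]: v4(), ...i }));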

src/hooks/useEvent.ts

@@ -5,7 +5,7 @@ import useChatModel from '@/hooks/useChatModel';
 import { GITHUB_PROMPTS_CSV_URL, chatPromptsPath, genCmd } from '@/utils';

 export default function useEvent() {
-  const { modelSet } = useChatModel('sys_sync_prompts');
+  const { modelSet } = useChatModel('sync_prompts');

   // Handlers registered with `emit` and `listen` fire multiple times in development mode,
   // so we call `__sync_prompt` via `eval` instead.
   useInit(() => {
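A minimal sketch of the workaround that comment describes, assuming `__sync_prompt` is attached to `window` elsewhere in this hook; the wiring below is illustrative, not this file's code:

// Sketch: register the handler once on window, then invoke it via eval,
// so duplicated dev-mode `listen` callbacks cannot run the sync twice.
declare global {
  interface Window { __sync_prompt?: () => Promise<void>; }
}

window.__sync_prompt = async () => {
  // fetch GITHUB_PROMPTS_CSV_URL, build slash commands with genCmd, save via modelSet
};

// Later, instead of emit('sync_prompt'):
eval('window.__sync_prompt && window.__sync_prompt()');

export {};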