refactor: prompts sync

lencx
2022-12-31 17:59:55 +08:00
parent 9a392a71f6
commit e1f8030009
11 changed files with 185 additions and 140 deletions

View File

@@ -1,6 +1,6 @@
 import { useState, useRef, useEffect } from 'react';
 import { Table, Modal, Button, message } from 'antd';
-import { invoke, http, path, fs } from '@tauri-apps/api';
+import { invoke, path, fs } from '@tauri-apps/api';
 import useData from '@/hooks/useData';
 import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
@@ -10,7 +10,7 @@ import { CHAT_MODEL_JSON, chatRoot, readJSON, genCmd } from '@/utils';
 import { syncColumns, getPath } from './config';
 import SyncForm from './Form';
-const setTag = (data: Record<string, any>[]) => data.map((i) => ({ ...i, tags: ['user-sync'], enable: true }))
+const fmtData = (data: Record<string, any>[] = []) => (Array.isArray(data) ? data : []).map((i) => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), tags: ['user-sync'], enable: true }));
 export default function SyncCustom() {
   const [isVisible, setVisible] = useState(false);
@@ -44,9 +44,16 @@ export default function SyncCustom() {
       setVisible(true);
     }
     if (['delete'].includes(opInfo.opType)) {
-      const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
-      modelSet(data);
-      opInfo.resetRecord();
+      (async () => {
+        try {
+          const file = await path.join(await chatRoot(), 'cache_model', `${opInfo?.opRecord?.id}.json`);
+          await fs.removeFile(file);
+        } catch(e) {}
+        const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
+        modelSet(data);
+        opInfo.resetRecord();
+        modelCacheCmd();
+      })();
     }
   }, [opInfo.opType, formRef]);
@@ -58,20 +65,9 @@
     // https or http
     if (/^http/.test(record?.protocol)) {
-      const res = await http.fetch(filePath, {
-        method: 'GET',
-        responseType: isJson ? 1 : 2,
-      });
-      if (res.ok) {
-        if (isJson) {
-          // parse json
-          await modelCacheSet(setTag(Array.isArray(res?.data) ? res?.data : []), file);
-        } else {
-          // parse csv
-          const list: Record<string, string>[] = await invoke('parse_prompt', { data: res?.data });
-          const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
-          await modelCacheSet(fmtList, file);
-        }
+      const data = await invoke('sync_user_prompts', { url: filePath, dataType: record?.ext });
+      if (data) {
+        await modelCacheSet(data as [], file);
         await modelCacheCmd();
         message.success('ChatGPT Prompts data has been synchronized!');
       } else {
@@ -83,13 +79,12 @@
     if (isJson) {
       // parse json
       const data = await readJSON(filePath, { isRoot: true });
-      await modelCacheSet(setTag(Array.isArray(data) ? data : []), file);
+      await modelCacheSet(fmtData(data), file);
     } else {
       // parse csv
       const data = await fs.readTextFile(filePath);
       const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-      const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
-      await modelCacheSet(fmtList, file);
+      await modelCacheSet(fmtData(list), file);
     }
     await modelCacheCmd();
   };
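
In the hunks above, the component stops fetching and parsing prompts itself: the HTTP download and CSV/JSON handling for remote sources now sit behind a single `sync_user_prompts` Tauri command, and local files are normalized through the shared `fmtData` helper. Below is a minimal TypeScript sketch of the new call pattern, assuming the command returns the parsed prompt list (or nothing on failure) as the `if (data)` check implies; the `PromptRecord` shape and the wrapper name are illustrative, not part of this commit.

import { invoke } from '@tauri-apps/api';

// Illustrative shape only, inferred from the fields this commit sets
// (act, cmd, tags, enable); the real records may carry more.
interface PromptRecord {
  act: string;
  cmd: string;
  tags: string[];
  enable: boolean;
}

// Hypothetical wrapper: delegate download + parsing to the Rust side and
// hand back whatever `sync_user_prompts` returns, or null when it fails.
export async function syncRemotePrompts(url: string, ext: string): Promise<PromptRecord[] | null> {
  const data = await invoke<PromptRecord[] | null>('sync_user_prompts', {
    url,            // remote JSON or CSV location (filePath in the component)
    dataType: ext,  // file extension, presumably used by the backend to pick a parser
  });
  return data ?? null;
}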

View File

@@ -1,13 +1,13 @@
 import { useEffect, useState } from 'react';
-import { Table, Button, message, Popconfirm } from 'antd';
-import { invoke, http, path, shell } from '@tauri-apps/api';
+import { Table, Button, Popconfirm } from 'antd';
+import { invoke, path, shell } from '@tauri-apps/api';
 import useInit from '@/hooks/useInit';
 import useData from '@/hooks/useData';
 import useColumns from '@/hooks/useColumns';
 import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
 import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
-import { fmtDate, chatRoot, GITHUB_PROMPTS_CSV_URL, genCmd } from '@/utils';
+import { fmtDate, chatRoot } from '@/utils';
 import { syncColumns } from './config';
 import './index.scss';
@@ -33,24 +33,13 @@
   }, [modelCacheJson.length]);
   const handleSync = async () => {
-    const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
-      method: 'GET',
-      responseType: http.ResponseType.Text,
-    });
-    const data = (res.data || '') as string;
-    if (res.ok) {
-      // const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
-      const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-      const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
-      await modelCacheSet(fmtList);
-      opInit(fmtList);
+    const data = await invoke('sync_prompts', { time: Date.now() });
+    if (data) {
+      opInit(data as any[]);
       modelSet({
         id: 'chatgpt_prompts',
         last_updated: Date.now(),
       });
-      message.success('ChatGPT Prompts data has been synchronized!');
-    } else {
-      message.error('ChatGPT Prompts data sync failed, please try again!');
     }
   };
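
The same pattern lands in the default-prompts view: the GitHub CSV download, the `parse_prompt` invoke, and the tag/cmd formatting all move behind a `sync_prompts` command, which is also why `http`, `message`, `GITHUB_PROMPTS_CSV_URL`, and `genCmd` drop out of the imports. A hedged sketch of the resulting call, typed loosely to mirror the `data as any[]` cast above; the wrapper name is illustrative.

import { invoke } from '@tauri-apps/api';

// Sketch only: the element type is not visible in this diff, so the result
// stays loosely typed, mirroring the `data as any[]` cast in the component.
export async function syncDefaultPrompts(): Promise<Record<string, any>[] | null> {
  const data = await invoke<Record<string, any>[] | null>('sync_prompts', {
    // The current timestamp is passed through; what the backend does with it
    // is not shown in this diff.
    time: Date.now(),
  });
  return data ?? null;
}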

View File

@@ -24,7 +24,9 @@ export const syncColumns = () => [
     dataIndex: 'tags',
     key: 'tags',
     // width: 150,
-    render: () => <Tag>chatgpt-prompts</Tag>,
+    render: (v: string[]) => (
+      <span className="chat-prompts-tags">{v?.map(i => <Tag key={i}>{i}</Tag>)}</span>
+    ),
   },
   {
     title: 'Enable',
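
The column config change makes the Tags cell data-driven: instead of a hard-coded chatgpt-prompts tag, it renders whatever `tags` array the record carries (for example the user-sync tag applied by `fmtData` earlier in this commit). A standalone sketch of a column in that shape, with an assumed Tags title and a minimal row type for illustration:

import { Tag } from 'antd';
import type { ColumnsType } from 'antd/es/table';

// Minimal row type for illustration; real prompt records carry more fields.
type PromptRow = { tags?: string[] };

export const columns: ColumnsType<PromptRow> = [
  {
    title: 'Tags', // assumed title; not visible in this hunk
    dataIndex: 'tags',
    key: 'tags',
    // One antd Tag per entry; `v` may be undefined on rows without tags.
    render: (v: string[]) => (
      <span className="chat-prompts-tags">{v?.map((i) => <Tag key={i}>{i}</Tag>)}</span>
    ),
  },
];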