Compare commits

...

14 Commits

Author        SHA1        Message  (Date)
lencx         f071e0d6bc  v0.6.8  (2022-12-24 22:30:28 +08:00)
lencx         2f8ff36638  v0.6.7  (2022-12-24 21:37:35 +08:00)
lencx         38e319a215  v0.6.6  (2022-12-24 21:06:16 +08:00)
lencx         05057d06ad  fix: unable to synchronize  (2022-12-24 21:05:51 +08:00)
lencx         413d3354c7  v0.6.5  (2022-12-24 20:07:07 +08:00)
lencx         f1c7fff800  readme  (2022-12-24 20:06:56 +08:00)
lencx         6fe90dea5b  fix: path not allowed on the configured scope (#64)  (2022-12-24 20:04:24 +08:00)
lencx         25ab2b0368  chore: optim  (2022-12-24 20:04:14 +08:00)
lencx         94973b1420  Merge pull request #69 from JacobLinCool/patch-1  (2022-12-24 01:10:52 +08:00)
JacobLinCool  0930cd782a  docs: fix cask name in brewfile section  (2022-12-24 00:28:15 +08:00)
lencx         0733bba4bf  Merge pull request #67 from lencx/fix  (2022-12-23 23:07:54 +08:00)
lencx         f411541a76  Merge pull request #66 from lencx/fix  (2022-12-23 22:41:09 +08:00)
lencx         a75ae5e615  Merge pull request #65 from lencx/fix  (2022-12-23 21:46:01 +08:00)
lencx         7b8f29534b  Merge pull request #63 from lencx/fix  (2022-12-23 20:21:34 +08:00)
22 changed files with 212 additions and 109 deletions

View File

@@ -22,9 +22,9 @@
 **最新版:**
-- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
-- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
-- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.6.7_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/ChatGPT_0.6.7_x64.dmg)
+- `Linux`: [chat-gpt_0.6.7_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/chat-gpt_0.6.7_amd64.deb)
+- `Windows`: [ChatGPT_0.6.7_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/ChatGPT_0.6.7_x64_en-US.msi)
 [其他版本...](https://github.com/lencx/ChatGPT/releases)

View File

@@ -24,9 +24,9 @@
 **Latest:**
-- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
-- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
-- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.6.7_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/ChatGPT_0.6.7_x64.dmg)
+- `Linux`: [chat-gpt_0.6.7_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/chat-gpt_0.6.7_amd64.deb)
+- `Windows`: [ChatGPT_0.6.7_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.7/ChatGPT_0.6.7_x64_en-US.msi)
 [Other version...](https://github.com/lencx/ChatGPT/releases)
@@ -46,7 +46,7 @@ Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usag
 ```rb
 repo = "lencx/chatgpt"
 tap repo, "https://github.com/#{repo}.git"
-cask "popcorn-time", args: { "no-quarantine": true }
+cask "chatgpt", args: { "no-quarantine": true }
 ```
 ## 📢 Announcement
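With the cask name corrected from `popcorn-time` to `chatgpt` (commit 0930cd782a, merged via #69), running `brew bundle` against this Brewfile snippet installs ChatGPT from the tapped `lencx/chatgpt` repository rather than an unrelated cask.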

View File

@@ -1,5 +1,9 @@
 # UPDATE LOG
+## v0.6.8
+fix: unable to synchronize
 ## v0.6.4
 fix: path not allowed on the configured scope

View File

@@ -23,6 +23,7 @@ log = "0.4.17"
 csv = "1.1.6"
 thiserror = "1.0.38"
 walkdir = "2.3.2"
+regex = "1.7.0"
 # tokio = { version = "1.23.0", features = ["macros"] }
 # reqwest = "0.11.13"

View File

@@ -1,5 +1,5 @@
-use crate::{conf::ChatConfJson, utils};
-use std::{fs, path::PathBuf};
+use crate::{conf::ChatConfJson, utils::{self, exists}};
+use std::{collections::HashMap, fs, path::PathBuf};
 use tauri::{api, command, AppHandle, Manager};
 #[command]
@@ -72,7 +72,7 @@ pub fn get_chat_model_cmd() -> serde_json::Value {
     serde_json::from_str(&content).unwrap()
 }
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 pub struct PromptRecord {
     pub cmd: Option<String>,
     pub act: String,
@@ -99,9 +99,8 @@ pub fn window_reload(app: AppHandle, label: &str) {
         .unwrap();
 }
-use walkdir::WalkDir;
 use utils::chat_root;
+use walkdir::WalkDir;
 #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
 pub struct ModelRecord {
@@ -115,12 +114,14 @@ pub struct ModelRecord {
 #[command]
 pub fn cmd_list() -> Vec<ModelRecord> {
     let mut list = vec![];
-    for entry in WalkDir::new(chat_root().join("cache_model")).into_iter().filter_map(|e| e.ok()) {
+    for entry in WalkDir::new(chat_root().join("cache_model"))
+        .into_iter()
+        .filter_map(|e| e.ok())
+    {
         let file = fs::read_to_string(entry.path().display().to_string());
         if let Ok(v) = file {
             let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
-            let enable_list = data.into_iter()
-                .filter(|v| v.enable);
+            let enable_list = data.into_iter().filter(|v| v.enable);
             list.extend(enable_list)
         }
     }
@@ -128,3 +129,72 @@ pub fn cmd_list() -> Vec<ModelRecord> {
     list.sort_by(|a, b| a.cmd.len().cmp(&b.cmd.len()));
     list
 }
+#[command]
+pub fn sync_prompts(app: AppHandle, data: String, time: u64) {
+    let data = parse_prompt(data)
+        .iter()
+        .map(move |i| ModelRecord {
+            cmd: if i.cmd.is_some() {
+                i.cmd.clone().unwrap()
+            } else {
+                utils::gen_cmd(i.act.clone())
+            },
+            act: i.act.clone(),
+            prompt: i.prompt.clone(),
+            tags: vec!["chatgpt-prompts".to_string()],
+            enable: true,
+        })
+        .collect::<Vec<ModelRecord>>();
+    let model = chat_root().join("chat.model.json");
+    let model_cmd = chat_root().join("chat.model.cmd.json");
+    let chatgpt_prompts = chat_root().join("cache_model").join("chatgpt_prompts.json");
+    if !exists(&model) {
+        fs::write(&model, serde_json::json!({
+            "name": "ChatGPT Model",
+            "link": "https://github.com/lencx/ChatGPT"
+        }).to_string()).unwrap();
+    }
+    // chatgpt_prompts.json
+    fs::write(
+        chatgpt_prompts,
+        serde_json::to_string_pretty(&data).unwrap(),
+    )
+    .unwrap();
+    let cmd_data = cmd_list();
+    // chat.model.cmd.json
+    fs::write(
+        model_cmd,
+        serde_json::to_string_pretty(&serde_json::json!({
+            "name": "ChatGPT CMD",
+            "last_updated": time,
+            "data": cmd_data,
+        }))
+        .unwrap(),
+    )
+    .unwrap();
+    let mut kv = HashMap::new();
+    kv.insert(
+        "sync_prompts".to_string(),
+        serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
+    );
+    let model_data = utils::merge(
+        &serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
+        &kv,
+    );
+    // chat.model.json
+    fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
+    // refresh window
+    api::dialog::message(
+        app.get_window("core").as_ref(),
+        "Sync Prompts",
+        "ChatGPT Prompts data has been synchronized!",
+    );
+    window_reload(app, "core");
+}
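For orientation, here is a minimal sketch of the three JSON files the new `sync_prompts` command maintains under the chat root (`$HOME/.chatgpt`, per the fs scope later in this compare). File and field names come from the diff above; the prompt row and timestamp are illustrative, and the sketch assumes the `serde_json` crate already used throughout `src-tauri`:

```rust
// Sketch only: the on-disk JSON shapes written by the `sync_prompts` command above.
use serde_json::json;

fn main() {
    let time: u64 = 1_671_890_128_000; // Date.now()-style milliseconds passed in from the frontend

    // ~/.chatgpt/cache_model/chatgpt_prompts.json: every parsed CSV row, forced to enable = true
    let prompts = json!([{
        "cmd": "linux_terminal", // falls back to gen_cmd(act) when the CSV row has no cmd
        "act": "Linux Terminal",
        "prompt": "I want you to act as a linux terminal ...",
        "tags": ["chatgpt-prompts"],
        "enable": true
    }]);

    // ~/.chatgpt/chat.model.cmd.json: the merged, enabled command list plus the sync timestamp
    let model_cmd = json!({
        "name": "ChatGPT CMD",
        "last_updated": time,
        "data": prompts.clone()
    });

    // ~/.chatgpt/chat.model.json: created on first sync, then merged with a `sync_prompts` entry
    let model = json!({
        "name": "ChatGPT Model",
        "link": "https://github.com/lencx/ChatGPT",
        "sync_prompts": { "id": "chatgpt_prompts", "last_updated": time }
    });

    println!("{prompts}\n{model_cmd}\n{model}");
}
```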

View File

@@ -8,6 +8,8 @@ use tauri::{
 };
 use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
+use super::window;
 // --- Menu
 pub fn init() -> Menu {
     let chat_conf = ChatConfJson::get_chat_conf();
@@ -174,7 +176,7 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
     match menu_id {
         // Preferences
-        "control_center" => app.get_window("main").unwrap().show().unwrap(),
+        "control_center" => window::control_window(&app),
         "restart" => tauri::api::process::restart(&app.env()),
         "inject_script" => open(&app, script_path),
         "go_conf" => utils::open_file(utils::chat_root()),
@@ -182,12 +184,12 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
         "awesome" => open(&app, conf::AWESOME_URL.to_string()),
         "sync_prompts" => {
             tauri::api::dialog::ask(
-                app.get_window("main").as_ref(),
+                app.get_window("core").as_ref(),
                 "Sync Prompts",
                 "Data sync will enable all prompts, are you sure you want to sync?",
                 move |is_restart| {
                     if is_restart {
-                        app.get_window("main")
+                        app.get_window("core")
                             .unwrap()
                             .eval("window.__sync_prompts && window.__sync_prompts()")
                             .unwrap()
@@ -304,7 +306,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
             }
         }
         SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
-            "control_center" => app.get_window("main").unwrap().show().unwrap(),
+            "control_center" => window::control_window(&app),
             "restart" => tauri::api::process::restart(&handle.env()),
             "show_dock_icon" => {
                 ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))

View File

@@ -28,3 +28,17 @@ pub fn tray_window(handle: &tauri::AppHandle) {
             .unwrap();
     });
 }
+pub fn control_window(handle: &tauri::AppHandle) {
+    let app = handle.clone();
+    std::thread::spawn(move || {
+        WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
+            .title("ChatGPT")
+            .resizable(true)
+            .fullscreen(false)
+            .inner_size(800.0, 600.0)
+            .min_inner_size(800.0, 600.0)
+            .build()
+            .unwrap();
+    });
+}
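Taken together with the menu and tray handlers above and the `tauri.conf.json` and `main.rs` changes further down, the preferences ("main") window is no longer declared statically and kept hidden: it is now built on demand by `control_window` when Control Center is opened, and closed rather than hidden when dismissed.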

View File

@@ -106,8 +106,6 @@ async function cmdTip() {
     // input text
     if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) {
-      console.log('«110» /src/assets/cmd.js ~> ', __CHAT_MODEL_STATUS__);
       searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
       modelDom.innerHTML = '';
       delete window.__CHAT_MODEL_STATUS__;

View File

@@ -86,6 +86,26 @@ async function init() {
       }
     }
   });
+  window.__sync_prompts = async function() {
+    const res = await fetch('https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv');
+    if (res.ok) {
+      const data = await res.text();
+      console.log('«94» /src/assets/core.js ~> ', data);
+      await invoke('sync_prompts', { data, time: Date.now() });
+    } else {
+      invoke('messageDialog', {
+        __tauriModule: 'Dialog',
+        message: {
+          cmd: 'messageDialog',
+          message: 'ChatGPT Prompts data sync failed, please try again!'.toString(),
+          title: 'Sync Prompts'.toString(),
+          type: 'error'
+        }
+      })
+    }
+  }
 }
 if (
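This replaces the React-side hook: the deleted `src/hooks/useEvent.ts` further down used `@tauri-apps/api` to fetch the prompts CSV and write the cache itself, whereas the injected script now only fetches the CSV text and hands it, with a timestamp, to the new `sync_prompts` Rust command.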

View File

@@ -61,6 +61,7 @@ fn main() {
             cmd::open_file,
             cmd::get_chat_model_cmd,
             cmd::parse_prompt,
+            cmd::sync_prompts,
             cmd::window_reload,
             cmd::cmd_list,
             fs_extra::metadata,
@@ -76,7 +77,7 @@ fn main() {
             if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() {
                 let win = event.window();
                 if win.label() == "main" {
-                    win.hide().unwrap();
+                    win.close().unwrap();
                 } else {
                     // TODO: https://github.com/tauri-apps/tauri/issues/3084
                     // event.window().hide().unwrap();

View File

@@ -1,6 +1,9 @@
 use anyhow::Result;
 use log::info;
+use regex::Regex;
+use serde_json::Value;
 use std::{
+    collections::HashMap,
     fs::{self, File},
     path::{Path, PathBuf},
     process::Command,
@@ -89,3 +92,21 @@ pub fn clear_conf(app: &tauri::AppHandle) {
         },
     );
 }
+pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
+    match v {
+        Value::Object(m) => {
+            let mut m = m.clone();
+            for (k, v) in fields {
+                m.insert(k.clone(), v.clone());
+            }
+            Value::Object(m)
+        }
+        v => v.clone(),
+    }
+}
+pub fn gen_cmd(name: String) -> String {
+    let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
+    re.replace_all(&name, "_").to_lowercase()
+}
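As a quick standalone sketch of how the two new helpers behave (the example inputs are illustrative; it assumes the `regex` and `serde_json` crates used elsewhere in this compare):

```rust
use regex::Regex;
use serde_json::{json, Value};
use std::collections::HashMap;

// Shallow merge: top-level keys from `fields` are inserted into a JSON object,
// overwriting existing keys; non-objects are returned unchanged.
pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
    match v {
        Value::Object(m) => {
            let mut m = m.clone();
            for (k, v) in fields {
                m.insert(k.clone(), v.clone());
            }
            Value::Object(m)
        }
        v => v.clone(),
    }
}

// Slugify a prompt title into a command name: non-alphanumerics become `_`, then lowercase.
pub fn gen_cmd(name: String) -> String {
    let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
    re.replace_all(&name, "_").to_lowercase()
}

fn main() {
    let base = json!({ "name": "ChatGPT Model", "link": "https://github.com/lencx/ChatGPT" });
    let mut kv = HashMap::new();
    kv.insert(
        "sync_prompts".to_string(),
        json!({ "id": "chatgpt_prompts", "last_updated": 0 }),
    );
    // Prints the base object plus a top-level "sync_prompts" key
    println!("{}", merge(&base, &kv));

    // Prints "english_translator_and_improver"
    println!("{}", gen_cmd("English Translator and Improver".to_string()));
}
```

Note that `merge` only touches top-level keys, so an existing nested object under the same key is replaced wholesale rather than deep-merged.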

View File

@@ -7,7 +7,7 @@
   },
   "package": {
     "productName": "ChatGPT",
-    "version": "0.6.4"
+    "version": "0.6.8"
   },
   "tauri": {
     "allowlist": {
@@ -22,9 +22,9 @@
       "fs": {
         "all": true,
         "scope": [
+          "$HOME/*",
           "$HOME/.chatgpt/*",
-          "$HOME/.chatgpt/**",
-          "$HOME/.chatgpt/cache_model/*"
+          "$HOME/.chatgpt/cache_mode/*"
         ]
       }
     },
@@ -79,18 +79,6 @@
         "https://lencx.github.io/ChatGPT/install.json"
       ],
       "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK"
-    },
-    "windows": [
-      {
-        "label": "main",
-        "url": "index.html",
-        "title": "ChatGPT",
-        "visible": false,
-        "width": 800,
-        "height": 600,
-        "minWidth": 800,
-        "minHeight": 600
-      }
-    ]
+    }
   }
 }

View File

@@ -18,8 +18,6 @@ export default function useData(oData: any[]) {
   const opInit = (val: any[] = []) => {
     if (!val || !Array.isArray(val)) return;
-    console.log('«20» /src/hooks/useData.ts ~> ', val);
     const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
     setData(nData);
   };

src/hooks/useEvent.ts
View File

@@ -1,34 +0,0 @@
-import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
-import useInit from '@/hooks/useInit';
-import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
-import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
-export default function useEvent() {
-  const { modelSet } = useChatModel('sync_prompts');
-  const { modelCacheSet } = useCacheModel();
-  // Using `emit` and `listen` will be triggered multiple times in development mode.
-  // So here we use `eval` to call `__sync_prompt`
-  useInit(() => {
-    (window as any).__sync_prompts = async () => {
-      const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
-        method: 'GET',
-        responseType: http.ResponseType.Text,
-      });
-      const data = (res.data || '') as string;
-      if (res.ok) {
-        const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
-        const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-        const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
-        await modelCacheSet(fmtList, file);
-        modelSet({
-          id: 'chatgpt_prompts',
-          last_updated: Date.now(),
-        });
-        dialog.message('ChatGPT Prompts data has been synchronized!');
-      } else {
-        dialog.message('ChatGPT Prompts data sync failed, please try again!');
-      }
-    }
-  })
-}

src/main.scss
View File

@@ -45,6 +45,12 @@ html, body {
   }
 }
+.chat-table-tip {
+  > span {
+    line-height: 16px;
+  }
+}
 .chat-sync-path {
   font-size: 12px;
   font-weight: 500;
@@ -52,6 +58,14 @@ html, body {
   margin-bottom: 5px;
   line-height: 16px;
+  > div {
+    max-width: 400px;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    color: #2a2a2a;
+  }
   span {
     display: inline-block;
     // background-color: #d8d8d8;

src/main.tsx
View File

@@ -2,23 +2,15 @@ import { StrictMode, Suspense } from 'react';
 import { BrowserRouter } from 'react-router-dom';
 import ReactDOM from 'react-dom/client';
-import useEvent from '@/hooks/useEvent';
 import Layout from '@/layout';
 import './main.scss';
-const App = () => {
-  useEvent();
-  return (
-    <BrowserRouter>
-      <Layout/>
-    </BrowserRouter>
-  );
-}
 ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
   <StrictMode>
     <Suspense fallback={null}>
-      <App />
+      <BrowserRouter>
+        <Layout/>
+      </BrowserRouter>
     </Suspense>
   </StrictMode>
 );

src/utils.ts
View File

@@ -20,10 +20,6 @@ export const chatModelPath = async (): Promise<string> => {
   return join(await chatRoot(), CHAT_MODEL_JSON);
 }
-// export const chatModelSyncPath = async (): Promise<string> => {
-//   return join(await chatRoot(), CHAT_MODEL_SYNC_JSON);
-// }
 export const chatPromptsPath = async (): Promise<string> => {
   return join(await chatRoot(), CHAT_PROMPTS_CSV);
 }
@@ -35,7 +31,9 @@ export const readJSON = async (path: string, opts: readJSONOpts = {}) => {
   const file = await join(isRoot ? '' : root, path);
   if (!await exists(file)) {
-    await createDir(await dirname(file), { recursive: true });
+    if (await dirname(file) !== root) {
+      await createDir(await dirname(file), { recursive: true });
+    }
     await writeTextFile(file, isList ? '[]' : JSON.stringify({
       name: 'ChatGPT',
       link: 'https://github.com/lencx/ChatGPT',

View File

@@ -8,6 +8,7 @@ import useInit from '@/hooks/useInit';
 interface SyncFormProps {
   record?: Record<string|symbol, any> | null;
+  type: string;
 }
 const initFormValue = {
@@ -17,7 +18,8 @@
   prompt: '',
 };
-const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }, ref) => {
+const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record, type }, ref) => {
+  const isDisabled = type === 'edit';
   const [form] = Form.useForm();
   useImperativeHandle(ref, () => ({ form }));
   const [root, setRoot] = useState('');
@@ -34,7 +36,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
   const pathOptions = (
     <Form.Item noStyle name="protocol" initialValue="https">
-      <Select>
+      <Select disabled={isDisabled}>
        <Select.Option value="local">{root}</Select.Option>
        <Select.Option value="http">http://</Select.Option>
        <Select.Option value="https">https://</Select.Option>
@@ -43,7 +45,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
   );
   const extOptions = (
     <Form.Item noStyle name="ext" initialValue="json">
-      <Select>
+      <Select disabled={isDisabled}>
        <Select.Option value="csv">.csv</Select.Option>
        <Select.Option value="json">.json</Select.Option>
      </Select>
@@ -90,8 +92,13 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
        label="PATH"
        name="path"
        rules={[{ required: true, message: 'Please input path!' }]}
      >
-       <Input placeholder="YOUR_PATH" addonBefore={pathOptions} addonAfter={extOptions} {...DISABLE_AUTO_COMPLETE} />
+       <Input
+         placeholder="YOUR_PATH"
+         addonBefore={pathOptions}
+         addonAfter={extOptions}
+         {...DISABLE_AUTO_COMPLETE}
+       />
      </Form.Item>
      <Form.Item style={{ display: 'none' }} name="id" initialValue={v4().replace(/-/g, '')}><input /></Form.Item>
    </Form>

View File

@@ -34,7 +34,7 @@ export const syncColumns = () => [
     key: 'last_updated',
     width: 140,
     render: (v: number) => (
-      <div style={{ textAlign: 'center' }}>
+      <div>
        <HistoryOutlined style={{ marginRight: 5, color: v ? '#52c41a' : '#ff4d4f' }} />
        { v ? fmtDate(v) : ''}
      </div>
@@ -47,7 +47,15 @@
     render: (_: any, row: any, actions: any) => {
       return (
         <Space>
-          <a onClick={() => actions.setRecord(row, 'sync')}>Sync</a>
+          <Popconfirm
+            overlayStyle={{ width: 250 }}
+            title="Sync will overwrite the previous data, confirm to sync?"
+            onConfirm={() => actions.setRecord(row, 'sync')}
+            okText="Yes"
+            cancelText="No"
+          >
+            <a>Sync</a>
+          </Popconfirm>
          {row.last_updated && <Link to={`${row.id}`} state={row}>View</Link>}
          <a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
          <Popconfirm

View File

@@ -128,12 +128,12 @@ export default function SyncCustom() {
       <Modal
         open={isVisible}
         onCancel={hide}
-        title="Model PATH"
+        title="Sync PATH"
         onOk={handleOk}
         destroyOnClose
         maskClosable={false}
       >
-        <SyncForm ref={formRef} record={opInfo?.opRecord} />
+        <SyncForm ref={formRef} record={opInfo?.opRecord} type={opInfo.opType} />
       </Modal>
     </div>
   )

View File

@@ -69,6 +69,16 @@ export default function SyncPrompts() {
   return (
     <div>
       <div className="chat-table-btns">
+        <Popconfirm
+          overlayStyle={{ width: 250 }}
+          title="Sync will overwrite the previous data, confirm to sync?"
+          placement="topLeft"
+          onConfirm={handleSync}
+          okText="Yes"
+          cancelText="No"
+        >
+          <Button type="primary">Sync</Button>
+        </Popconfirm>
         <div>
           {selectedItems.length > 0 && (
             <>
@@ -78,15 +88,6 @@
             </>
           )}
         </div>
-        <Popconfirm
-          title={<span>Data sync will enable all prompts,<br/>are you sure you want to sync?</span>}
-          placement="topLeft"
-          onConfirm={handleSync}
-          okText="Yes"
-          cancelText="No"
-        >
-          <Button type="primary">Sync</Button>
-        </Popconfirm>
       </div>
       <div className="chat-table-tip">
         <div className="chat-sync-path">

View File

@@ -10,7 +10,7 @@ export const syncColumns = () => [
     // width: 120,
     key: 'cmd',
     render: (_: string, row: Record<string, string>) => (
-      <Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
+      <Tag color="#2a2a2a">/{row.cmd ? row.cmd : genCmd(row.act)}</Tag>
     ),
   },
   {