Compare commits

..

13 Commits

Author SHA1 Message Date
lencx
5dd671c98e v0.5.1 2022-12-20 01:14:36 +08:00
lencx
75a7b9c78d readme 2022-12-20 01:14:26 +08:00
lencx
47a3bace5b chore: optim 2022-12-19 23:09:17 +08:00
lencx
8966ebbd03 Merge pull request #46 from lencx/dev 2022-12-19 03:12:07 +08:00
lencx
3fe04a244a v0.5.0 2022-12-19 02:57:15 +08:00
lencx
c54aec88c0 feat: chatgpt-prompts sync 2022-12-19 02:56:53 +08:00
lencx
02fb4dd3b7 chore: windows conf 2022-12-18 13:30:27 +08:00
lencx
028ef8bae8 Merge pull request #44 from lencx/fix 2022-12-18 12:11:05 +08:00
lencx
e86bf42cc1 v0.4.2 2022-12-18 11:52:49 +08:00
lencx
09b8643d99 feat: add log 2022-12-18 11:52:37 +08:00
lencx
c07fd1e0b8 chore: add log 2022-12-18 11:50:34 +08:00
lencx
1b71bf8f26 Merge pull request #40 from lencx/fix 2022-12-17 21:51:09 +08:00
lencx
4366b8ee8a readme 2022-12-17 21:33:23 +08:00
26 changed files with 542 additions and 64 deletions

View File

@@ -22,9 +22,9 @@
**最新版:**
- `Mac`: [ChatGPT_0.4.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64.dmg)
- `Linux`: [chat-gpt_0.4.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/chat-gpt_0.4.0_amd64.deb)
- `Windows`: [ChatGPT_0.4.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64_en-US.msi)
- `Mac`: [ChatGPT_0.5.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64.dmg)
- `Linux`: [chat-gpt_0.5.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/chat-gpt_0.5.1_amd64.deb)
- `Windows`: [ChatGPT_0.5.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64_en-US.msi)
[其他版本...](https://github.com/lencx/ChatGPT/releases)

View File

@@ -23,9 +23,9 @@
**Latest:**
- `Mac`: [ChatGPT_0.4.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64.dmg)
- `Linux`: [chat-gpt_0.4.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/chat-gpt_0.4.0_amd64.deb)
- `Windows`: [ChatGPT_0.4.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64_en-US.msi)
- `Mac`: [ChatGPT_0.5.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64.dmg)
- `Linux`: [chat-gpt_0.5.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/chat-gpt_0.5.1_amd64.deb)
- `Windows`: [ChatGPT_0.5.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64_en-US.msi)
[Other versions...](https://github.com/lencx/ChatGPT/releases)

View File

@@ -1,5 +1,17 @@
# UPDATE LOG
## v0.5.1
some optimization
## v0.5.0
feat: `Control Center` added `chatgpt-prompts` synchronization
## v0.4.2
add chatgpt log (path: `~/.chatgpt/chatgpt.log`)
## v0.4.1
fix:

View File

@@ -33,6 +33,7 @@
"@ant-design/icons": "^4.8.0",
"@tauri-apps/api": "^1.2.0",
"antd": "^5.0.6",
"dayjs": "^1.11.7",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-dom": "^18.2.0",

View File

@@ -19,6 +19,14 @@ serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] }
tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] }
log = "0.4.17"
csv = "1.1.6"
thiserror = "1.0.38"
[dependencies.tauri-plugin-log]
git = "https://github.com/tauri-apps/tauri-plugin-log"
branch = "dev"
features = ["colored"]
[features]
# by default Tauri runs in production mode

View File

@@ -70,4 +70,21 @@ pub fn get_chat_model() -> serde_json::Value {
let path = utils::chat_root().join("chat.model.json");
let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
serde_json::from_str(&content).unwrap()
}
}
/// One row of the awesome-chatgpt-prompts CSV (`act,prompt` columns).
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct PromptRecord {
    pub act: String,
    pub prompt: String,
}

/// Tauri command: parse the prompts CSV (fetched from GitHub by the
/// frontend) into a list of records.
///
/// The data is untrusted remote content, so rows that fail to deserialize
/// are skipped rather than `unwrap()`-panicking: one malformed line must
/// not crash the whole sync command.
#[command]
pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
    let mut rdr = csv::Reader::from_reader(data.as_bytes());
    rdr.deserialize::<PromptRecord>()
        .filter_map(|row| row.ok())
        .collect()
}

View File

@@ -0,0 +1,123 @@
// https://github.com/tauri-apps/tauri-plugin-fs-extra/blob/dev/src/lib.rs
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use serde::{ser::Serializer, Serialize};
use std::{
path::PathBuf,
time::{SystemTime, UNIX_EPOCH},
};
use tauri::command;
#[cfg(unix)]
use std::os::unix::fs::{MetadataExt, PermissionsExt};
#[cfg(windows)]
use std::os::windows::fs::MetadataExt;
type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Io(#[from] std::io::Error),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}
/// Subset of `std::fs::Permissions` surfaced to the webview.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Permissions {
    readonly: bool,
    // Raw Unix mode bits (file type + rwx); compiled in on Unix only.
    #[cfg(unix)]
    mode: u32,
}

/// Unix-only metadata fields (from `std::os::unix::fs::MetadataExt`),
/// flattened into the `Metadata` JSON object on Unix targets.
#[cfg(unix)]
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct UnixMetadata {
    dev: u64,     // id of the device containing the file
    ino: u64,     // inode number
    mode: u32,    // file type + permission bits
    nlink: u64,   // hard-link count
    uid: u32,     // owner user id
    gid: u32,     // owner group id
    rdev: u64,    // device id (for special files)
    blksize: u64, // preferred I/O block size
    blocks: u64,  // number of 512-byte blocks allocated
}

/// File metadata returned by the `metadata` command.
/// `*_at_ms` timestamps are milliseconds since the Unix epoch
/// (0 when the platform cannot provide the value).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    accessed_at_ms: u64,
    created_at_ms: u64,
    modified_at_ms: u64,
    is_dir: bool,
    is_file: bool,
    is_symlink: bool,
    size: u64,
    permissions: Permissions,
    // Extra Unix fields are merged into the same JSON object via flatten.
    #[cfg(unix)]
    #[serde(flatten)]
    unix: UnixMetadata,
    // Windows-only raw FILE_ATTRIBUTE_* bitfield.
    #[cfg(windows)]
    file_attributes: u32,
}
/// Convert a platform timestamp result into milliseconds since the Unix
/// epoch, returning 0 when the value is unavailable.
///
/// Both failure cases map to 0: the platform not supporting the timestamp
/// (`Err`), and a timestamp earlier than the epoch — the original
/// `duration_since(UNIX_EPOCH).unwrap()` would panic on such files.
fn system_time_to_ms(time: std::io::Result<SystemTime>) -> u64 {
    time.ok()
        .and_then(|t| t.duration_since(UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_millis() as u64)
}
/// Tauri command: collect filesystem metadata for `path` and return it in a
/// JSON-serializable form.
///
/// NOTE(review): `std::fs::metadata` follows symlinks, so `is_symlink` here
/// can never be `true`; if link-awareness is wanted, `fs::symlink_metadata`
/// would be needed — confirm intended behavior before changing.
#[command]
pub async fn metadata(path: PathBuf) -> Result<Metadata> {
    let metadata = std::fs::metadata(path)?;
    let file_type = metadata.file_type();
    let permissions = metadata.permissions();
    Ok(Metadata {
        // Timestamps normalized to ms-since-epoch; 0 when unavailable.
        accessed_at_ms: system_time_to_ms(metadata.accessed()),
        created_at_ms: system_time_to_ms(metadata.created()),
        modified_at_ms: system_time_to_ms(metadata.modified()),
        is_dir: file_type.is_dir(),
        is_file: file_type.is_file(),
        is_symlink: file_type.is_symlink(),
        size: metadata.len(),
        permissions: Permissions {
            readonly: permissions.readonly(),
            #[cfg(unix)]
            mode: permissions.mode(),
        },
        // Platform-specific extras; only one of these fields exists per build.
        #[cfg(unix)]
        unix: UnixMetadata {
            dev: metadata.dev(),
            ino: metadata.ino(),
            mode: metadata.mode(),
            nlink: metadata.nlink(),
            uid: metadata.uid(),
            gid: metadata.gid(),
            rdev: metadata.rdev(),
            blksize: metadata.blksize(),
            blocks: metadata.blocks(),
        },
        #[cfg(windows)]
        file_attributes: metadata.file_attributes(),
    })
}
// #[command]
// pub async fn exists(path: PathBuf) -> bool {
// path.exists()
// }

View File

@@ -4,7 +4,7 @@ use crate::{
};
use tauri::{
AboutMetadata, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray,
SystemTrayEvent, SystemTrayMenu, WindowMenuEvent, SystemTrayMenuItem,
SystemTrayEvent, SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
};
use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
@@ -138,6 +138,10 @@ pub fn init() -> Menu {
let help_menu = Submenu::new(
"Help",
Menu::new()
.add_item(CustomMenuItem::new(
"chatgpt_log".to_string(),
"ChatGPT Log",
))
.add_item(CustomMenuItem::new("update_log".to_string(), "Update Log"))
.add_item(CustomMenuItem::new("report_bug".to_string(), "Report Bug"))
.add_item(
@@ -226,6 +230,7 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
)
.unwrap(),
// Help
"chatgpt_log" => utils::open_file(utils::chat_root().join("chatgpt.log")),
"update_log" => open(&app, conf::UPDATE_LOG_URL.to_string()),
"report_bug" => open(&app, conf::ISSUES_URL.to_string()),
"dev_tools" => {
@@ -240,11 +245,20 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
pub fn tray_menu() -> SystemTray {
SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new("control_center".to_string(), "Control Center"))
.add_item(CustomMenuItem::new("show_dock_icon".to_string(), "Show Dock Icon"))
.add_item(CustomMenuItem::new("hide_dock_icon".to_string(), "Hide Dock Icon"))
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT"))
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
)
}
@@ -276,25 +290,19 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
"control_center" => app.get_window("main").unwrap().show().unwrap(),
"restart" => tauri::api::process::restart(&handle.env()),
"show_dock_icon" => {
ChatConfJson::amend(
&serde_json::json!({ "hide_dock_icon": false }),
Some(app),
)
.unwrap();
},
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))
.unwrap();
}
"hide_dock_icon" => {
let chat_conf = conf::ChatConfJson::get_chat_conf();
if !chat_conf.hide_dock_icon {
ChatConfJson::amend(
&serde_json::json!({ "hide_dock_icon": true }),
Some(app),
)
.unwrap();
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": true }), Some(app))
.unwrap();
}
},
}
"quit" => std::process::exit(0),
_ => (),
}
},
_ => (),
}
}

View File

@@ -1,4 +1,5 @@
pub mod cmd;
pub mod fs_extra;
pub mod menu;
pub mod setup;
pub mod window;

View File

@@ -34,7 +34,8 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
.build().unwrap();
.build()
.unwrap();
#[cfg(not(target_os = "macos"))]
WindowBuilder::new(&app, "core", WindowUrl::App(url.into()))
@@ -51,7 +52,8 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
.build().unwrap();
.build()
.unwrap();
});
}

View File

@@ -62,8 +62,10 @@ function init() {
async function cmdTip() {
const chatModelJson = await invoke('get_chat_model') || {};
if (!chatModelJson.data && chatModelJson.data.length <= 0) return;
const data = chatModelJson.data || [];
const user_custom = chatModelJson.user_custom || [];
const sys_sync_prompts = chatModelJson.sys_sync_prompts || [];
const data = [...user_custom, ...sys_sync_prompts];
if (data.length <= 0) return;
const modelDom = document.createElement('div');
modelDom.classList.add('chat-model-cmd-list');
@@ -74,7 +76,7 @@ async function cmdTip() {
}
document.querySelector('form').appendChild(modelDom);
const itemDom = (v) => `<div class="cmd-item" data-prompt="${encodeURIComponent(v.prompt)}"><b>/${v.cmd}</b><i>${v.act}</i></div>`;
const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
const searchInput = document.querySelector('form textarea');
// Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.

View File

@@ -7,15 +7,45 @@ mod app;
mod conf;
mod utils;
use app::{cmd, menu, setup};
use app::{cmd, fs_extra, menu, setup};
use conf::{ChatConfJson, ChatState};
use tauri::api::path;
use tauri_plugin_log::{
fern::colors::{Color, ColoredLevelConfig},
LogTarget, LoggerBuilder,
};
fn main() {
ChatConfJson::init();
let chat_conf = ChatConfJson::get_chat_conf();
let context = tauri::generate_context!();
let colors = ColoredLevelConfig {
error: Color::Red,
warn: Color::Yellow,
debug: Color::Blue,
info: Color::BrightGreen,
trace: Color::Cyan,
};
tauri::Builder::default()
// https://github.com/tauri-apps/tauri/pull/2736
.plugin(
LoggerBuilder::new()
.level(if cfg!(debug_assertions) {
log::LevelFilter::Debug
} else {
log::LevelFilter::Trace
})
.with_colors(colors)
.targets([
// LogTarget::LogDir,
// LOG PATH: ~/.chatgpt/ChatGPT.log
LogTarget::Folder(path::home_dir().unwrap().join(".chatgpt")),
LogTarget::Stdout,
LogTarget::Webview,
])
.build(),
)
.manage(ChatState::default(chat_conf))
.invoke_handler(tauri::generate_handler![
cmd::drag_window,
@@ -28,6 +58,8 @@ fn main() {
cmd::form_msg,
cmd::open_file,
cmd::get_chat_model,
cmd::parse_prompt,
fs_extra::metadata,
])
.setup(setup::init)
.plugin(tauri_plugin_positioner::init())

View File

@@ -1,4 +1,5 @@
use anyhow::Result;
use log::info;
use std::{
fs::{self, File},
path::{Path, PathBuf},
@@ -48,6 +49,7 @@ pub fn user_script() -> String {
}
pub fn open_file(path: PathBuf) {
info!("open_file: {}", path.to_string_lossy());
#[cfg(target_os = "macos")]
Command::new("open").arg("-R").arg(path).spawn().unwrap();

View File

@@ -7,11 +7,16 @@
},
"package": {
"productName": "ChatGPT",
"version": "0.4.1"
"version": "0.5.1"
},
"tauri": {
"allowlist": {
"all": true,
"http": {
"scope": [
"https://raw.githubusercontent.com/*"
]
},
"fs": {
"all": true,
"scope": [
@@ -51,13 +56,19 @@
"shortDescription": "ChatGPT",
"targets": "all",
"windows": {
"webviewInstallMode": {
"silent": true,
"type": "downloadBootstrapper"
},
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
"timestampUrl": "",
"webviewInstallMode": {
"silent": true,
"type": "embedBootstrapper"
},
"wix": {
"language": [
"zh-CN",
"en-US"
]
}
}
},
"security": {

View File

@@ -4,20 +4,20 @@ import { clone } from 'lodash';
import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils';
import useInit from '@/hooks/useInit';
export default function useChatModel() {
export default function useChatModel(key: string) {
const [modelJson, setModelJson] = useState<Record<string, any>>({});
useInit(async () => {
const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', data: [] });
const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', [key]: [] });
setModelJson(data);
});
const modelSet = async (data: Record<string, any>[]) => {
const oData = clone(modelJson);
oData.data = data;
oData[key] = data;
await writeJSON(CHAT_MODEL_JSON, oData);
setModelJson(oData);
}
return { modelJson, modelSet, modelData: modelJson?.data || [] }
return { modelJson, modelSet, modelData: modelJson?.[key] || [] }
}

12
src/hooks/useData.ts vendored
View File

@@ -7,9 +7,8 @@ export default function useData(oData: any[]) {
const [opData, setData] = useState<any[]>([]);
useEffect(() => {
const nData = oData.map(i => ({ [safeKey]: v4(), ...i }));
setData(nData);
}, [oData])
opInit(oData);
}, [])
const opAdd = (val: any) => {
const v = [val, ...opData];
@@ -17,6 +16,11 @@ export default function useData(oData: any[]) {
return v;
};
const opInit = (val: any[] = []) => {
const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
setData(nData);
};
const opRemove = (id: string) => {
const nData = opData.filter(i => i[safeKey] !== id);
setData(nData);
@@ -31,5 +35,5 @@ export default function useData(oData: any[]) {
return nData;
};
return { opSafeKey: safeKey, opReplace, opAdd, opRemove, opData };
return { opSafeKey: safeKey, opInit, opReplace, opAdd, opRemove, opData };
}

View File

@@ -8,5 +8,5 @@ export default function useInit(callback: () => void) {
callback();
isInit.current = false;
}
}, [])
})
}

View File

@@ -8,12 +8,19 @@
}
}
.ant-layout-sider-trigger {
user-select: none;
-webkit-user-select: none;
}
.chat-container {
padding: 20px;
overflow: hidden;
}
.ant-menu {
user-select: none;
-webkit-user-select: none;
.ant-menu-item {
background-color: #f8f8f8;
}

35
src/layout/index.tsx vendored
View File

@@ -17,13 +17,38 @@ const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
const go = useNavigate();
return (
<Layout style={{ minHeight: '100vh' }}>
<Sider theme="light" collapsible collapsed={collapsed} onCollapse={(value) => setCollapsed(value)}>
<Layout style={{ minHeight: '100vh' }} hasSider>
<Sider
theme="light"
collapsible
collapsed={collapsed}
onCollapse={(value) => setCollapsed(value)}
style={{
overflow: 'auto',
height: '100vh',
position: 'fixed',
left: 0,
top: 0,
bottom: 0,
zIndex: 999,
}}
>
<div className="chat-logo"><img src="/logo.png" /></div>
<Menu defaultSelectedKeys={[location.pathname]} mode="vertical" items={menuItems} onClick={(i) => go(i.key)} />
<Menu
defaultSelectedKeys={[location.pathname]}
mode="inline"
inlineIndent={12}
items={menuItems}
onClick={(i) => go(i.key)}
/>
</Sider>
<Layout className="chat-layout">
<Content className="chat-container">
<Layout className="chat-layout" style={{ marginLeft: collapsed ? 80 : 200, transition: 'margin-left 300ms ease-out' }}>
<Content
className="chat-container"
style={{
overflow: 'inherit'
}}
>
<Routes />
</Content>
<Footer style={{ textAlign: 'center' }}>

36
src/routes.tsx vendored
View File

@@ -2,19 +2,28 @@ import { useRoutes } from 'react-router-dom';
import {
DesktopOutlined,
BulbOutlined,
SyncOutlined,
UserOutlined,
} from '@ant-design/icons';
import type { RouteObject } from 'react-router-dom';
import type { MenuProps } from 'antd';
import General from '@view/General';
import LanguageModel from '@/view/LanguageModel';
import SyncPrompts from '@/view/SyncPrompts';
export type ChatRouteObject = {
export type ChatRouteMetaObject = {
label: string;
icon?: React.ReactNode,
};
export const routes: Array<RouteObject & { meta: ChatRouteObject }> = [
type ChatRouteObject = {
path: string;
element?: JSX.Element;
meta: ChatRouteMetaObject;
children?: ChatRouteObject[];
}
export const routes: Array<ChatRouteObject> = [
{
path: '/',
element: <General />,
@@ -25,11 +34,28 @@ export const routes: Array<RouteObject & { meta: ChatRouteObject }> = [
},
{
path: '/language-model',
element: <LanguageModel />,
meta: {
label: 'Language Model',
icon: <BulbOutlined />,
},
children: [
{
path: 'user-custom',
element: <LanguageModel />,
meta: {
label: 'User Custom',
icon: <UserOutlined />,
},
},
{
path: 'sync-prompts',
element: <SyncPrompts />,
meta: {
label: 'Sync Prompts',
icon: <SyncOutlined />,
},
},
]
},
];
@@ -37,6 +63,8 @@ type MenuItem = Required<MenuProps>['items'][number];
export const menuItems: MenuItem[] = routes.map(i => ({
...i.meta,
key: i.path || '',
children: i?.children?.map((j) =>
({ ...j.meta, key: `${i.path}/${j.path}` || ''})),
}));
export default () => {

14
src/utils.ts vendored
View File

@@ -1,7 +1,10 @@
import { readTextFile, writeTextFile, exists } from '@tauri-apps/api/fs';
import { homeDir, join } from '@tauri-apps/api/path';
import dayjs from 'dayjs';
export const CHAT_MODEL_JSON = 'chat.model.json';
export const CHAT_PROMPTS_CSV = 'chat.prompts.csv';
export const GITHUB_PROMPTS_CSV_URL = 'https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv';
export const DISABLE_AUTO_COMPLETE = {
autoCapitalize: 'off',
autoComplete: 'off',
@@ -12,10 +15,14 @@ export const chatRoot = async () => {
return join(await homeDir(), '.chatgpt')
}
export const chatModelPath = async () => {
export const chatModelPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_MODEL_JSON);
}
export const chatPromptsPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_PROMPTS_CSV);
}
export const readJSON = async (path: string, defaultVal = {}) => {
const root = await chatRoot();
const file = await join(root, path);
@@ -24,7 +31,6 @@ export const readJSON = async (path: string, defaultVal = {}) => {
writeTextFile(file, JSON.stringify({
name: 'ChatGPT',
link: 'https://github.com/lencx/ChatGPT/blob/main/chat.model.md',
data: null,
...defaultVal,
}, null, 2))
}
@@ -40,4 +46,6 @@ export const writeJSON = async (path: string, data: Record<string, any>) => {
const root = await chatRoot();
const file = await join(root, path);
await writeTextFile(file, JSON.stringify(data, null, 2));
}
}
export const fmtDate = (date: any) => dayjs(date).format('YYYY-MM-DD HH:mm:ss');

View File

@@ -1,4 +1,4 @@
import { Tag, Switch, Tooltip, Space } from 'antd';
import { Tag, Switch, Tooltip, Space, Popconfirm } from 'antd';
export const modelColumns = () => [
{
@@ -29,7 +29,9 @@ export const modelColumns = () => [
dataIndex: 'enable',
key: 'enable',
width: 80,
render: (v: boolean = false) => <Switch checked={v} disabled />,
render: (v: boolean = false, row: Record<string, any>, action: Record<string, any>) => (
<Switch checked={v} onChange={(v) => action.setRecord({ ...row, enable: v }, 'enable')} />
),
},
{
title: 'Prompt',
@@ -48,7 +50,14 @@ export const modelColumns = () => [
render: (_: any, row: any, actions: any) => (
<Space size="middle">
<a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
<a onClick={() => actions.setRecord(row, 'delete')}>Delete</a>
<Popconfirm
title="Are you sure to delete this model?"
onConfirm={() => actions.setRecord(row, 'delete')}
okText="Yes"
cancelText="No"
>
<a>Delete</a>
</Popconfirm>
</Space>
),
}

View File

@@ -3,9 +3,9 @@ import { Table, Button, Modal, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useChatModel from '@/hooks/useChatModel';
import useColumns from '@/hooks/useColumns';
import useData from '@/hooks/useData';
import { chatModelPath } from '@/utils';
import { modelColumns } from './config';
import LanguageModelForm from './Form';
@@ -14,11 +14,16 @@ import './index.scss';
export default function LanguageModel() {
const [isVisible, setVisible] = useState(false);
const [modelPath, setChatModelPath] = useState('');
const { modelData, modelSet } = useChatModel();
const { opData, opAdd, opRemove, opReplace, opSafeKey } = useData(modelData);
const { modelData, modelSet } = useChatModel('user_custom');
const { opData, opInit, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(modelColumns());
const formRef = useRef<any>(null);
useEffect(() => {
if (modelData.length <= 0) return;
opInit(modelData);
}, [modelData])
useEffect(() => {
if (!opInfo.opType) return;
if (['edit', 'new'].includes(opInfo.opType)) {
@@ -31,6 +36,13 @@ export default function LanguageModel() {
}
}, [opInfo.opType, formRef]);
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelSet(data);
}
}, [opInfo.opTime])
useInit(async () => {
const path = await chatModelPath();
setChatModelPath(path);

47
src/view/SyncPrompts/config.tsx vendored Normal file
View File

@@ -0,0 +1,47 @@
import { Switch, Tag, Tooltip } from 'antd';
// Derive a slash-command name from a prompt's "act" title: runs of
// whitespace or "/" become "_", remaining non-word characters are dropped,
// and the result is lower-cased (e.g. "Linux Terminal" -> "linux_terminal").
export const genCmd = (act: string) => act.replace(/\s+|\/+/g, '_').replace(/[^\d\w]/g, '').toLocaleLowerCase();

// Column definitions for the synced-prompts table (antd <Table>).
// The third `render` argument is the action helper injected by the columns
// hook; `action.setRecord(..., 'enable')` reports toggle changes to the page.
export const modelColumns = () => [
  {
    title: '/{cmd}',
    dataIndex: 'cmd',
    fixed: 'left',
    // width: 120,
    key: 'cmd',
    // The command is always re-derived from `act` rather than read from the row.
    render: (_: string, row: Record<string, string>) => (
      <Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
    ),
  },
  {
    title: 'Act',
    dataIndex: 'act',
    key: 'act',
    // width: 200,
  },
  {
    title: 'Tags',
    dataIndex: 'tags',
    key: 'tags',
    // width: 150,
    // Synced rows all carry the same fixed tag.
    render: () => <Tag>chatgpt-prompts</Tag>,
  },
  {
    title: 'Enable',
    dataIndex: 'enable',
    key: 'enable',
    // width: 80,
    render: (v: boolean = false, row: Record<string, any>, action: Record<string, any>) => (
      <Switch checked={v} onChange={(v) => action.setRecord({ ...row, enable: v }, 'enable')} />
    ),
  },
  {
    title: 'Prompt',
    dataIndex: 'prompt',
    key: 'prompt',
    // width: 300,
    // Full prompt text shown in a tooltip; the cell itself is truncated via CSS.
    render: (v: string) => (
      <Tooltip overlayInnerStyle={{ width: 350 }} title={v}><span className="chat-prompts-val">{v}</span></Tooltip>
    ),
  },
];

28
src/view/SyncPrompts/index.scss vendored Normal file
View File

@@ -0,0 +1,28 @@
// Styles for the Sync Prompts view.
.chat-prompts-tags {
  .ant-tag {
    margin: 2px;
  }
}
.add-btn {
  margin-bottom: 5px;
}
// Source-URL line rendered above the table; styled like a link.
.chat-model-path {
  font-size: 12px;
  font-weight: bold;
  color: #888;
  margin-bottom: 5px;
  span {
    display: inline-block;
    // background-color: #d8d8d8;
    color: #4096ff;
    padding: 0 8px;
    height: 20px;
    line-height: 20px;
    border-radius: 4px;
    cursor: pointer;
    text-decoration: underline;
  }
}

91
src/view/SyncPrompts/index.tsx vendored Normal file
View File

@@ -0,0 +1,91 @@
import { useEffect, useState } from 'react';
import { Table, Button, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import { fetch, ResponseType } from '@tauri-apps/api/http';
import { writeTextFile, readTextFile } from '@tauri-apps/api/fs';
import useInit from '@/hooks/useInit';
import useColumns from '@/hooks/useColumns';
import useData from '@/hooks/useData';
import useChatModel from '@/hooks/useChatModel';
import { fmtDate, chatPromptsPath, GITHUB_PROMPTS_CSV_URL } from '@/utils';
import { modelColumns, genCmd } from './config';
import './index.scss';
// Human-facing link to the upstream prompts file (shown above the table);
// the raw-CSV URL used for the actual fetch lives in utils (GITHUB_PROMPTS_CSV_URL).
const promptsURL = 'https://github.com/f/awesome-chatgpt-prompts/blob/main/prompts.csv';

// Sync Prompts view: fetches the awesome-chatgpt-prompts CSV, stores it under
// the `sys_sync_prompts` key of chat.model.json, and lists it in a table
// with per-row enable toggles.
// NOTE(review): the component is named `LanguageModel` although it lives in
// SyncPrompts — presumably copied from the LanguageModel view; confirm before renaming.
export default function LanguageModel() {
  const [loading, setLoading] = useState(false);
  const [lastUpdated, setLastUpdated] = useState();
  const { modelJson, modelSet } = useChatModel('sys_sync_prompts');
  const { opData, opInit, opReplace, opSafeKey } = useData([]);
  const { columns, ...opInfo } = useColumns(modelColumns());

  // Dead experiment kept from development (read last-sync time from file
  // metadata) — candidate for removal.
  // useInit(async () => {
  //   // const filename = await chatPromptsPath();
  //   // const data = await readTextFile(filename);
  //   // const list: Record<string, string>[] = await invoke('parse_prompt', { data });
  //   // const fileData: Record<string, any> = await invoke('metadata', { path: filename });
  //   // setLastUpdated(fileData.accessedAtMs);
  //   // opInit(list);
  //   console.log('«31» /view/SyncPrompts/index.tsx ~> ', modelJson);
  //   opInit([]);
  // })

  // Seed the table from previously-synced data once the model JSON loads.
  useEffect(() => {
    if (!modelJson?.sys_sync_prompts) return;
    opInit(modelJson?.sys_sync_prompts)
  }, [modelJson?.sys_sync_prompts])

  // Fetch the CSV, persist the raw file, parse it via the Rust backend,
  // and store the enriched records (cmd/enable/tags defaults) in the model.
  const handleSync = async () => {
    setLoading(true);
    const res = await fetch(GITHUB_PROMPTS_CSV_URL, {
      method: 'GET',
      responseType: ResponseType.Text,
    });
    const data = (res.data || '') as string;
    if (res.ok) {
      // const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
      await writeTextFile(await chatPromptsPath(), data);
      const list: Record<string, string>[] = await invoke('parse_prompt', { data });
      opInit(list);
      // Spread `...i` last so any existing fields in the row win over the defaults.
      modelSet(list.map(i => ({ cmd: genCmd(i.act), enable: true, tags: ['chatgpt-prompts'], ...i })));
      setLastUpdated(fmtDate(Date.now()) as any);
      message.success('ChatGPT Prompts data has been synchronized!');
    } else {
      message.error('ChatGPT Prompts data sync failed, please try again!');
    }
    setLoading(false);
  };

  // Persist enable-toggle changes reported by the table's action helper.
  useEffect(() => {
    if (opInfo.opType === 'enable') {
      const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
      modelSet(data);
    }
  }, [opInfo.opTime]);

  return (
    <div>
      <Button type="primary" loading={loading} onClick={handleSync}>Sync</Button>
      {lastUpdated && <span style={{ marginLeft: 10, color: '#999' }}>Last updated on {fmtDate(lastUpdated)}</span>}
      <div className="chat-model-path">URL: <a href={promptsURL} target="_blank">{promptsURL}</a></div>
      <Table
        key={lastUpdated}
        rowKey="act"
        columns={columns}
        scroll={{ x: 'auto' }}
        dataSource={opData}
        pagination={{
          hideOnSinglePage: true,
          showSizeChanger: true,
          showQuickJumper: true,
          defaultPageSize: 5,
          pageSizeOptions: [5, 10, 15, 20],
          showTotal: (total) => <span>Total {total} items</span>,
        }}
      />
    </div>
  )
}