Compare commits

...

71 Commits

Author SHA1 Message Date
lencx dc769235a7 v0.7.4 2022-12-31 20:11:21 +08:00
lencx 52cc029b01 readme 2022-12-31 20:11:15 +08:00
lencx 39dc007513 readme 2022-12-31 20:08:11 +08:00
lencx ba1fe9a603 refactor: global shortcut 2022-12-31 20:07:09 +08:00
lencx e1f8030009 refactor: prompts sync 2022-12-31 18:01:31 +08:00
lencx 9a392a71f6 chore: conf path 2022-12-31 12:43:47 +08:00
lencx dc0c78fee2 fix: customize global shortcuts (#108) 2022-12-30 23:46:30 +08:00
lencx b3bd54ce81 Merge pull request #101 from lencx/dev 2022-12-29 21:31:43 +08:00
lencx ba21fa85d2 v0.7.3 2022-12-29 21:06:40 +08:00
lencx 2ab35bb925 readme 2022-12-29 21:06:29 +08:00
lencx 9cacad0120 chore: optim style 2022-12-29 21:05:21 +08:00
lencx f1fa859961 chore: optim 2022-12-29 19:29:33 +08:00
lencx 9a9fb24de8 chore: icon & global shortcuts 2022-12-29 01:42:00 +08:00
lencx 3424666ec9 chore: tauri conf 2022-12-29 01:42:00 +08:00
lencx 416bf7064c Merge pull request #97 from Sturlen/main 2022-12-28 18:22:32 +08:00
Sturlen f5cf3acd3a fix: matching action button colors 2022-12-28 10:47:30 +01:00
lencx 975ffd2d84 Merge pull request #92 from lencx/dev 2022-12-28 03:49:04 +08:00
lencx 145264719f chore: action 2022-12-28 03:35:08 +08:00
lencx a929376cb2 v0.7.2 2022-12-28 03:34:05 +08:00
lencx 478049e23e fix: windows can't start 2022-12-28 03:33:40 +08:00
lencx 631dee95a7 fix: windows can't start 2022-12-28 03:07:17 +08:00
lencx c4ff0b4107 Merge pull request #91 from lencx/dev 2022-12-28 01:44:22 +08:00
lencx bcd350584e v0.7.1 2022-12-28 01:25:49 +08:00
lencx 050045f644 chore: action 2022-12-28 01:25:28 +08:00
lencx 7e9440b45e readme 2022-12-28 01:13:22 +08:00
lencx cd9c0ac742 fix: windows can't start (#85) 2022-12-28 01:13:00 +08:00
lencx 2d018c4967 fix: tray icon (#87) 2022-12-28 01:12:03 +08:00
lencx f4d3cc6c8e Merge branch 'main' of github.com:lencx/ChatGPT 2022-12-27 21:33:29 +08:00
xueweiwujxw cd6cece45e fix(src-tauri/src/app/menu.rs): warning on linux (add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu) 2022-12-27 21:29:46 +08:00
lencx 54b5b63f0e v0.7.0 2022-12-27 21:29:11 +08:00
lencx 680f1b01ad readme 2022-12-27 21:29:11 +08:00
lencx 078b0296f5 chore: cmd 2022-12-27 21:29:11 +08:00
lencx c956758a4a readme 2022-12-27 21:29:11 +08:00
lencx 477120ef3b feat: use the keyboard to select the slash command 2022-12-27 21:29:11 +08:00
lencx 0ee95630ef Merge pull request #86 from xueweiwujxw/menu-warn 2022-12-27 21:15:28 +08:00
xueweiwujxw fb0319a977 🐞 fix(src-tauri/src/app/menu.rs): fix warning on linux (add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu) 2022-12-27 20:50:23 +08:00
lencx ea1a78abf5 Merge pull request #84 from lencx/dev 2022-12-27 15:31:56 +08:00
lencx 3428e11b85 v0.7.0 2022-12-27 15:15:47 +08:00
lencx 0e0771d0ec readme 2022-12-27 15:14:49 +08:00
lencx d78e2ad0b3 chore: cmd 2022-12-27 15:14:24 +08:00
lencx ae31da0b29 readme 2022-12-27 14:54:28 +08:00
lencx 39febe759e feat: use the keyboard to select the slash command 2022-12-27 14:54:28 +08:00
lencx 06ee907199 Merge pull request #81 from beilunyang/patch-1 2022-12-27 12:17:58 +08:00
BeilunYang f8c1ca5c56 fix(build): mac m1 chip copy/paste 2022-12-27 11:34:16 +08:00
lencx 6da58269bd Merge pull request #79 from weltonrodrigo/patch-1 2022-12-26 10:19:28 +08:00
Welton Rodrigo Torres Nascimento 4bf6c61bee bump homebrew cask to 0.6.10 2022-12-25 20:29:47 -03:00
lencx a07c85a9cc Merge pull request #78 from lencx/dev 2022-12-25 09:22:15 +08:00
lencx 95a9f12b68 v0.6.10 2022-12-25 08:54:11 +08:00
lencx 252b0f3e15 fix: windows sync 2022-12-25 08:53:58 +08:00
lencx ed268b32b3 Merge pull request #77 from lencx/dev 2022-12-25 02:27:58 +08:00
lencx e2319f2fda v0.6.9 2022-12-25 02:01:51 +08:00
lencx 9ec69631f3 readme 2022-12-25 02:01:47 +08:00
lencx 83437ffea7 Merge pull request #73 from lencx/dev 2022-12-24 22:56:30 +08:00
lencx be9846dc22 readme 2022-12-24 22:31:03 +08:00
lencx f071e0d6bc v0.6.8 2022-12-24 22:30:28 +08:00
lencx 62a176d20c Merge pull request #72 from lencx/dev 2022-12-24 21:51:21 +08:00
lencx 2f8ff36638 v0.6.7 2022-12-24 21:37:35 +08:00
lencx fe236e3c66 Merge pull request #71 from lencx/dev 2022-12-24 21:20:41 +08:00
lencx 38e319a215 v0.6.6 2022-12-24 21:06:16 +08:00
lencx 05057d06ad fix: unable to synchronize 2022-12-24 21:05:51 +08:00
lencx 0b0b832130 Merge pull request #70 from lencx/dev 2022-12-24 20:26:23 +08:00
lencx 413d3354c7 v0.6.5 2022-12-24 20:07:07 +08:00
lencx f1c7fff800 readme 2022-12-24 20:06:56 +08:00
lencx 6fe90dea5b fix: path not allowed on the configured scope (#64) 2022-12-24 20:04:24 +08:00
lencx 25ab2b0368 chore: optim 2022-12-24 20:04:14 +08:00
lencx 94973b1420 Merge pull request #69 from JacobLinCool/patch-1 2022-12-24 01:10:52 +08:00
JacobLinCool 0930cd782a docs: fix cask name in brewfile section 2022-12-24 00:28:15 +08:00
lencx 0733bba4bf Merge pull request #67 from lencx/fix 2022-12-23 23:07:54 +08:00
lencx f411541a76 Merge pull request #66 from lencx/fix 2022-12-23 22:41:09 +08:00
lencx a75ae5e615 Merge pull request #65 from lencx/fix 2022-12-23 21:46:01 +08:00
lencx 7b8f29534b Merge pull request #63 from lencx/fix 2022-12-23 20:21:34 +08:00
30 changed files with 686 additions and 300 deletions

View File

@@ -8,7 +8,7 @@ on:
 jobs:
   create-release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     outputs:
       RELEASE_UPLOAD_ID: ${{ steps.create_release.outputs.id }}
@@ -36,34 +36,32 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        platform: [macos-latest, ubuntu-latest, windows-latest]
+        platform: [macos-latest, ubuntu-20.04, windows-latest]
     runs-on: ${{ matrix.platform }}
     steps:
-      - uses: actions/checkout@v2
-      - name: Setup node
-        uses: actions/setup-node@v1
+      - uses: actions/checkout@v3
+      - name: setup node
+        uses: actions/setup-node@v3
         with:
-          node-version: 18
+          node-version: 16
-      - name: Install Rust stable
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-      # Rust cache
-      - uses: Swatinem/rust-cache@v1
+      - name: install Rust stable
+        uses: dtolnay/rust-toolchain@stable
       - name: install dependencies (ubuntu only)
-        if: matrix.platform == 'ubuntu-latest'
+        if: matrix.platform == 'ubuntu-20.04'
         run: |
           sudo apt-get update
-          sudo apt-get install -y libgtk-3-dev webkit2gtk-4.0 libappindicator3-dev librsvg2-dev patchelf
+          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
       - name: Install app dependencies and build it
        run: yarn && yarn build:fe
+      - name: fix tray icon
+        if: matrix.platform != 'macos-latest'
+        run: |
+          yarn fix:tray
      - uses: tauri-apps/tauri-action@v0.3
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -74,7 +72,7 @@ jobs:
       releaseId: ${{ needs.create-release.outputs.RELEASE_UPLOAD_ID }}
   updater:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     needs: [create-release, build-tauri]
     steps:

View File

@@ -22,9 +22,9 @@
 **Latest:**
-- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
-- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
-- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.7.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64.dmg)
+- `Linux`: [chat-gpt_0.7.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/chat-gpt_0.7.4_amd64.deb)
+- `Windows`: [ChatGPT_0.7.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64_en-US.msi)
 [Other versions...](https://github.com/lencx/ChatGPT/releases)
@@ -58,9 +58,9 @@ cask "popcorn-time", args: { "no-quarantine": true }
 ![chat cmd](./assets/chat-cmd-1.png)
 ![chat cmd](./assets/chat-cmd-2.png)
-After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
+<!-- After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`). -->
-In the ChatGPT text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if multiple commands match, only the first one is used as the fill; keep typing until the first suggested command is the one you want, then press the spacebar, or click one of the commands with the mouse). When the fill is complete, just press Enter. Under the slash command, use the TAB key to modify the contents of the `{q}` tag (only a single change is supported [#54](https://github.com/lencx/ChatGPT/issues/54)).
+In the ChatGPT text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if multiple commands match, only the first one is used as the fill; keep typing until the first suggested command is the one you want, then press the spacebar, or click one of the commands with the mouse). When the fill is complete, just press Enter. Under the slash command, use the TAB key to modify the contents of the `{q}` tag (only a single change is supported [#54](https://github.com/lencx/ChatGPT/issues/54)). Use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command.
 ![chatgpt](assets/chatgpt.gif)
 ![chatgpt-cmd](assets/chatgpt-cmd.gif)
@@ -74,6 +74,7 @@ cask "popcorn-time", args: { "no-quarantine": true }
 - System tray hover window
 - Powerful menu items
 - Support for slash commands and their configuration (can be configured manually or synchronized from a file [#55](https://github.com/lencx/ChatGPT/issues/55))
+- Customize global shortcuts ([#108](https://github.com/lencx/ChatGPT/issues/108))
 ### Menu items

View File

@@ -24,9 +24,9 @@
 **Latest:**
-- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
-- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
-- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.7.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64.dmg)
+- `Linux`: [chat-gpt_0.7.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/chat-gpt_0.7.4_amd64.deb)
+- `Windows`: [ChatGPT_0.7.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64_en-US.msi)
 [Other version...](https://github.com/lencx/ChatGPT/releases)
@@ -46,7 +46,7 @@ Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usag
 ```rb
 repo = "lencx/chatgpt"
 tap repo, "https://github.com/#{repo}.git"
-cask "popcorn-time", args: { "no-quarantine": true }
+cask "chatgpt", args: { "no-quarantine": true }
 ```
 ## 📢 Announcement
@@ -60,9 +60,9 @@ You can look at [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-p
 ![chat cmd](./assets/chat-cmd-1.png)
 ![chat cmd](./assets/chat-cmd-2.png)
-After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
+<!-- After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`). -->
-In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if it contains multiple command prompts, it will only select the first one as the fill, you can keep typing until the first prompted command is the one you want, then press the spacebar. Or use the mouse to click on one of the multiple commands). When the fill is complete, you simply press the Enter key. Under the slash command, use the tab key to modify the contents of the `{q}` tag (only single changes are supported [#54](https://github.com/lencx/ChatGPT/issues/54)).
+In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if it contains multiple command prompts, it will only select the first one as the fill, you can keep typing until the first prompted command is the one you want, then press the spacebar. Or use the mouse to click on one of the multiple commands). When the fill is complete, you simply press the Enter key. Under the slash command, use the tab key to modify the contents of the `{q}` tag (only single changes are supported [#54](https://github.com/lencx/ChatGPT/issues/54)). Use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command.
 ![chatgpt](assets/chatgpt.gif)
 ![chatgpt-cmd](assets/chatgpt-cmd.gif)
@@ -76,6 +76,7 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
 - System tray hover window
 - Powerful menu items
 - Support for slash commands and their configuration (can be configured manually or synchronized from a file [#55](https://github.com/lencx/ChatGPT/issues/55))
+- Customize global shortcuts ([#108](https://github.com/lencx/ChatGPT/issues/108))
 ### MenuItem
@@ -154,7 +155,7 @@ Currently, only json and csv are supported for synchronizing custom files, and t
 ## TODO
-- Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20))
+<!-- - Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20)) -->
 - `Control Center` - Feature Enhancements
 - ...
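As a side note to the Brewfile fix above (`popcorn-time` corrected to `chatgpt`): the equivalent one-off commands without a Brewfile are standard Homebrew usage rather than anything introduced by this changeset, so treat the following as a generic sketch.

```sh
# Generic Homebrew usage (not part of this diff): tap the repo, then install the cask.
brew tap lencx/chatgpt https://github.com/lencx/chatgpt.git
brew install --cask chatgpt --no-quarantine
```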

View File

@@ -1,5 +1,43 @@
 # UPDATE LOG
+## v0.7.4
+fix:
+- trying to resolve linux errors: `error while loading shared libraries`
+- customize global shortcuts (`Menu -> Preferences -> Control Center -> General -> Global Shortcut`)
+## v0.7.3
+chore:
+- optimize slash command style
+- optimize tray menu icon and button icons
+- global shortcuts to the chatgpt app (mac: `Command + Shift + O`, windows: `Ctrl + Shift + O`)
+## v0.7.2
+fix: some windows systems cannot start the application
+## v0.7.1
+fix:
+- some windows systems cannot start the application
+- windows and linux add about menu (show version information)
+- the tray icon is indistinguishable from the background in dark mode on window and linux
+## v0.7.0
+fix:
+- mac m1 copy/paste does not work on some system versions
+- optimize the save chat log button to a small icon, the tray window no longer provides a save chat log button (the buttons causes the input area to become larger and the content area to become smaller)
+feat:
+- use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command
+<!-- - global shortcuts to the chatgpt app (mac: command+shift+o, windows: ctrl+shift+o) -->
+## v0.6.10
+fix: sync failure on windows
 ## v0.6.4
 fix: path not allowed on the configured scope
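The `global_shortcut` preference mentioned above is read at startup from `~/.chatgpt/chat.conf.json` (see the `conf.rs` and `setup.rs` changes later in this diff) and parsed as an accelerator before being registered. The sketch below is a hand-edited example, not a shipped default: the keys mirror `DEFAULT_CHAT_CONF` from this changeset (other keys omitted), and `CmdOrCtrl+Shift+O` is only an illustrative value echoing the shortcut named in the v0.7.3 notes; the actual default is an empty string, which leaves no shortcut registered.

```json
{
  "stay_on_top": false,
  "theme": "Light",
  "titlebar": true,
  "global_shortcut": "CmdOrCtrl+Shift+O",
  "hide_dock_icon": false,
  "default_origin": "https://chat.openai.com",
  "origin": "https://chat.openai.com"
}
```

If the string fails to parse as a valid accelerator, the new setup code only logs `global_shortcut_parse_error` and continues without registering anything.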

View File

@@ -1,6 +1,6 @@
cask "chatgpt" do cask "chatgpt" do
version "0.1.7" version "0.6.10"
sha256 "1320b30a67e2506f9b45ffd2a48243d6141171c231dd698994ae5156a637eb3f" sha256 "e85062565f826d32219c53b184d6df9c89441d4231cdfff775c2de8c50ac9906"
url "https://github.com/lencx/ChatGPT/releases/download/v#{version}/ChatGPT_#{version}_x64.dmg" url "https://github.com/lencx/ChatGPT/releases/download/v#{version}/ChatGPT_#{version}_x64.dmg"
name "ChatGPT" name "ChatGPT"

View File

@@ -8,6 +8,8 @@
"build": "yarn tauri build", "build": "yarn tauri build",
"updater": "tr updater", "updater": "tr updater",
"release": "tr release --git", "release": "tr release --git",
"fix:tray": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon-light.png\" --json.tauri_systemTray_iconAsTemplate=false",
"fix:tray:mac": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon.png\" --json.tauri_systemTray_iconAsTemplate=true",
"download": "node ./scripts/download.js", "download": "node ./scripts/download.js",
"tr": "tr", "tr": "tr",
"tauri": "tauri" "tauri": "tauri"

View File

@@ -17,14 +17,16 @@ tauri-build = {version = "1.2.1", features = [] }
anyhow = "1.0.66" anyhow = "1.0.66"
serde_json = "1.0" serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] } tauri = { version = "1.2.3", features = ["api-all", "devtools", "global-shortcut", "system-tray", "updater"] }
tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] } tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] }
log = "0.4.17" log = "0.4.17"
csv = "1.1.6" csv = "1.1.6"
thiserror = "1.0.38" thiserror = "1.0.38"
walkdir = "2.3.2" walkdir = "2.3.2"
# tokio = { version = "1.23.0", features = ["macros"] } regex = "1.7.0"
# reqwest = "0.11.13" tokio = { version = "1.23.0", features = ["macros"] }
reqwest = "0.11.13"
wry = "0.23.4"
[dependencies.tauri-plugin-log] [dependencies.tauri-plugin-log]
git = "https://github.com/tauri-apps/tauri-plugin-log" git = "https://github.com/tauri-apps/tauri-plugin-log"
@@ -44,4 +46,4 @@ custom-protocol = [ "tauri/custom-protocol" ]
[profile.release] [profile.release]
strip = true strip = true
lto = true lto = true
opt-level = "z" opt-level = "s"

Binary file not shown (after: 92 KiB).

View File

@@ -1,5 +1,9 @@
-use crate::{conf::ChatConfJson, utils};
-use std::{fs, path::PathBuf};
+use crate::{
+    conf::{ChatConfJson, GITHUB_PROMPTS_CSV_URL},
+    utils::{self, exists},
+};
+use log::info;
+use std::{collections::HashMap, fs, path::PathBuf};
 use tauri::{api, command, AppHandle, Manager};
 #[command]
@@ -72,7 +76,7 @@ pub fn get_chat_model_cmd() -> serde_json::Value {
     serde_json::from_str(&content).unwrap()
 }
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 pub struct PromptRecord {
     pub cmd: Option<String>,
     pub act: String,
@@ -84,8 +88,17 @@ pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
     let mut rdr = csv::Reader::from_reader(data.as_bytes());
     let mut list = vec![];
     for result in rdr.deserialize() {
-        let record: PromptRecord = result.unwrap();
-        list.push(record);
+        let record: PromptRecord = result.unwrap_or_else(|err| {
+            info!("parse_prompt_error: {}", err);
+            PromptRecord {
+                cmd: None,
+                act: "".to_string(),
+                prompt: "".to_string(),
+            }
+        });
+        if !record.act.is_empty() {
+            list.push(record);
+        }
     }
     list
 }
@@ -99,9 +112,8 @@ pub fn window_reload(app: AppHandle, label: &str) {
         .unwrap();
 }
-use walkdir::WalkDir;
 use utils::chat_root;
+use walkdir::WalkDir;
 #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
 pub struct ModelRecord {
@@ -115,12 +127,14 @@ pub struct ModelRecord {
 #[command]
 pub fn cmd_list() -> Vec<ModelRecord> {
     let mut list = vec![];
-    for entry in WalkDir::new(chat_root().join("cache_model")).into_iter().filter_map(|e| e.ok()) {
+    for entry in WalkDir::new(chat_root().join("cache_model"))
+        .into_iter()
+        .filter_map(|e| e.ok())
+    {
         let file = fs::read_to_string(entry.path().display().to_string());
         if let Ok(v) = file {
             let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
-            let enable_list = data.into_iter()
-                .filter(|v| v.enable);
+            let enable_list = data.into_iter().filter(|v| v.enable);
             list.extend(enable_list)
         }
     }
@@ -128,3 +142,135 @@ pub fn cmd_list() -> Vec<ModelRecord> {
     list.sort_by(|a, b| a.cmd.len().cmp(&b.cmd.len()));
     list
 }
#[command]
pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(GITHUB_PROMPTS_CSV_URL, Some(&app))
.await
.unwrap();
if let Some(v) = res {
let data = parse_prompt(v)
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["chatgpt-prompts".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
let data2 = data.clone();
let model = chat_root().join("chat.model.json");
let model_cmd = chat_root().join("chat.model.cmd.json");
let chatgpt_prompts = chat_root().join("cache_model").join("chatgpt_prompts.json");
if !exists(&model) {
fs::write(
&model,
serde_json::json!({
"name": "ChatGPT Model",
"link": "https://github.com/lencx/ChatGPT"
})
.to_string(),
)
.unwrap();
}
// chatgpt_prompts.json
fs::write(
chatgpt_prompts,
serde_json::to_string_pretty(&data).unwrap(),
)
.unwrap();
let cmd_data = cmd_list();
// chat.model.cmd.json
fs::write(
model_cmd,
serde_json::to_string_pretty(&serde_json::json!({
"name": "ChatGPT CMD",
"last_updated": time,
"data": cmd_data,
}))
.unwrap(),
)
.unwrap();
let mut kv = HashMap::new();
kv.insert(
"sync_prompts".to_string(),
serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
);
let model_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
&kv,
);
// chat.model.json
fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
// refresh window
api::dialog::message(
app.get_window("core").as_ref(),
"Sync Prompts",
"ChatGPT Prompts data has been synchronized!",
);
window_reload(app, "core");
return Some(data2);
}
None
}
#[command]
pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(&url, None).await.unwrap_or_else(|err| {
info!("chatgpt_http_error: {}", err);
None
});
info!("chatgpt_http_url: {}", url);
if let Some(v) = res {
let data;
if data_type == "csv" {
info!("chatgpt_http_csv_parse");
data = parse_prompt(v);
} else if data_type == "json" {
info!("chatgpt_http_json_parse");
data = serde_json::from_str(&v).unwrap_or_else(|err| {
info!("chatgpt_http_json_parse_error: {}", err);
vec![]
});
} else {
info!("chatgpt_http_unknown_type");
data = vec![];
}
let data = data
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["user-sync".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
return Some(data);
}
None
}

View File

@@ -3,30 +3,39 @@ use crate::{
     utils,
 };
 use tauri::{
-    AboutMetadata, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray,
-    SystemTrayEvent, SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
+    AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray, SystemTrayEvent,
+    SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
 };
 use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
+#[cfg(target_os = "macos")]
+use tauri::AboutMetadata;
+use super::window;
 // --- Menu
 pub fn init() -> Menu {
     let chat_conf = ChatConfJson::get_chat_conf();
     let name = "ChatGPT";
     let app_menu = Submenu::new(
         name,
-        Menu::new()
-            .add_native_item(MenuItem::About(name.into(), AboutMetadata::default()))
-            .add_native_item(MenuItem::Services)
-            .add_native_item(MenuItem::Separator)
-            .add_native_item(MenuItem::Hide)
-            .add_native_item(MenuItem::HideOthers)
-            .add_native_item(MenuItem::ShowAll)
-            .add_native_item(MenuItem::Separator)
-            .add_native_item(MenuItem::Quit),
+        Menu::with_items([
+            #[cfg(target_os = "macos")]
+            MenuItem::About(name.into(), AboutMetadata::default()).into(),
+            #[cfg(not(target_os = "macos"))]
+            CustomMenuItem::new("about".to_string(), "About ChatGPT").into(),
+            MenuItem::Services.into(),
+            MenuItem::Hide.into(),
+            MenuItem::HideOthers.into(),
+            MenuItem::ShowAll.into(),
+            MenuItem::Separator.into(),
+            MenuItem::Quit.into(),
+        ]),
     );
     let stay_on_top =
         CustomMenuItem::new("stay_on_top".to_string(), "Stay On Top").accelerator("CmdOrCtrl+T");
+    #[cfg(target_os = "macos")]
     let titlebar =
         CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B");
     let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light");
@@ -38,6 +47,7 @@ pub fn init() -> Menu {
     } else {
         stay_on_top
     };
+    #[cfg(target_os = "macos")]
     let titlebar_menu = if chat_conf.titlebar {
         titlebar.selected()
     } else {
@@ -173,8 +183,17 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
     let menu_handle = core_window.menu_handle();
     match menu_id {
+        // App
+        "about" => {
+            let tauri_conf = utils::get_tauri_conf().unwrap();
+            tauri::api::dialog::message(
+                app.get_window("core").as_ref(),
+                "ChatGPT",
+                format!("Version {}", tauri_conf.package.version.unwrap()),
+            );
+        }
         // Preferences
-        "control_center" => app.get_window("main").unwrap().show().unwrap(),
+        "control_center" => window::control_window(&app),
         "restart" => tauri::api::process::restart(&app.env()),
         "inject_script" => open(&app, script_path),
         "go_conf" => utils::open_file(utils::chat_root()),
@@ -182,12 +201,12 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
         "awesome" => open(&app, conf::AWESOME_URL.to_string()),
         "sync_prompts" => {
             tauri::api::dialog::ask(
-                app.get_window("main").as_ref(),
+                app.get_window("core").as_ref(),
                 "Sync Prompts",
                 "Data sync will enable all prompts, are you sure you want to sync?",
                 move |is_restart| {
                     if is_restart {
-                        app.get_window("main")
+                        app.get_window("core")
                             .unwrap()
                             .eval("window.__sync_prompts && window.__sync_prompts()")
                             .unwrap()
@@ -260,20 +279,32 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
 // --- SystemTray Menu
 pub fn tray_menu() -> SystemTray {
if cfg!(target_os = "macos") {
return SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
);
}
     SystemTray::new().with_menu(
         SystemTrayMenu::new()
             .add_item(CustomMenuItem::new(
                 "control_center".to_string(),
                 "Control Center",
             ))
-            .add_item(CustomMenuItem::new(
-                "show_dock_icon".to_string(),
-                "Show Dock Icon",
-            ))
-            .add_item(CustomMenuItem::new(
-                "hide_dock_icon".to_string(),
-                "Hide Dock Icon",
-            ))
             .add_native_item(SystemTrayMenuItem::Separator)
             .add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
     )
@@ -304,7 +335,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
         }
     }
     SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
-        "control_center" => app.get_window("main").unwrap().show().unwrap(),
+        "control_center" => window::control_window(&app),
         "restart" => tauri::api::process::restart(&handle.env()),
         "show_dock_icon" => {
             ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))

View File

@@ -1,22 +1,55 @@
 use crate::{app::window, conf::ChatConfJson, utils};
-use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, Manager};
+use log::info;
+use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, GlobalShortcutManager, Manager};
+use wry::application::accelerator::Accelerator;
 pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> {
+    info!("stepup");
     let chat_conf = ChatConfJson::get_chat_conf();
     let url = chat_conf.origin.to_string();
     let theme = ChatConfJson::theme();
     let handle = app.app_handle();
-    std::thread::spawn(move || {
+    tokio::spawn(async move {
         window::tray_window(&handle);
     });
if let Some(v) = chat_conf.global_shortcut {
info!("global_shortcut: `{}`", v);
match v.parse::<Accelerator>() {
Ok(_) => {
info!("global_shortcut_register");
let handle = app.app_handle();
let mut shortcut = app.global_shortcut_manager();
shortcut
.register(&v, move || {
if let Some(w) = handle.get_window("core") {
if w.is_visible().unwrap() {
w.hide().unwrap();
} else {
w.show().unwrap();
w.set_focus().unwrap();
}
}
})
.unwrap_or_else(|err| {
info!("global_shortcut_register_error: {}", err);
});
}
Err(err) => {
info!("global_shortcut_parse_error: {}", err);
}
}
} else {
info!("global_shortcut_unregister");
};
     if chat_conf.hide_dock_icon {
         #[cfg(target_os = "macos")]
         app.set_activation_policy(tauri::ActivationPolicy::Accessory);
     } else {
         let app = app.handle();
-        std::thread::spawn(move || {
+        tokio::spawn(async move {
             #[cfg(target_os = "macos")]
             WindowBuilder::new(&app, "core", WindowUrl::App(url.into()))
                 .title("ChatGPT")

View File

@@ -6,7 +6,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
     let theme = conf::ChatConfJson::theme();
     let app = handle.clone();
-    std::thread::spawn(move || {
+    tokio::spawn(async move {
         WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into()))
             .title("ChatGPT")
             .resizable(false)
@@ -16,10 +16,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
             .always_on_top(true)
             .theme(theme)
             .initialization_script(&utils::user_script())
-            .initialization_script(include_str!("../assets/html2canvas.js"))
-            .initialization_script(include_str!("../assets/jspdf.js"))
             .initialization_script(include_str!("../assets/core.js"))
-            .initialization_script(include_str!("../assets/export.js"))
             .initialization_script(include_str!("../assets/cmd.js"))
             .user_agent(&chat_conf.ua_tray)
             .build()
@@ -28,3 +25,17 @@ pub fn tray_window(handle: &tauri::AppHandle) {
             .unwrap();
     });
 }
pub fn control_window(handle: &tauri::AppHandle) {
let app = handle.clone();
tokio::spawn(async move {
WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
.title("Control Center")
.resizable(true)
.fullscreen(false)
.inner_size(800.0, 600.0)
.min_inner_size(800.0, 600.0)
.build()
.unwrap();
});
}

View File

@@ -13,13 +13,30 @@ function init() {
       z-index: 9999;
     }
     .chat-model-cmd-list>div {
-      border: solid 2px #d8d8d8;
+      border: solid 2px rgba(80,80,80,.3);
       border-radius: 5px;
       background-color: #fff;
     }
html.dark .chat-model-cmd-list>div {
background-color: #4a4a4a;
}
html.dark .chat-model-cmd-list .cmd-item {
border-color: #666;
}
html.dark .chat-model-cmd-list .cmd-item b {
color: #e8e8e8;
}
html.dark .chat-model-cmd-list .cmd-item i {
color: #999;
}
html.dark .chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.5);
}
     .chat-model-cmd-list .cmd-item {
       font-size: 12px;
-      border-bottom: solid 1px #888;
+      border-bottom: solid 1px rgba(80,80,80,.2);
       padding: 2px 4px;
       display: flex;
       user-select: none;
@@ -28,6 +45,9 @@
     .chat-model-cmd-list .cmd-item:last-child {
       border-bottom: none;
     }
.chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.3);
}
     .chat-model-cmd-list .cmd-item b {
       display: inline-block;
       width: 100px;
@@ -46,7 +66,16 @@
       white-space: nowrap;
       text-align: right;
       color: #888;
-    }`;
+    }
.chatappico {
width: 20px;
height: 20px;
}
.chatappico.pdf {
width: 24px;
height: 24px;
}
`;
   document.head.append(styleDom);
   if (window.formInterval) {
@@ -70,11 +99,24 @@ async function cmdTip() {
     // fix: tray window
     if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
-      modelDom.style.bottom = '40px';
+      modelDom.style.bottom = '54px';
     }
     document.querySelector('form').appendChild(modelDom);
-    const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
+    const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-cmd="${v.cmd}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
const renderList = (v) => {
modelDom.innerHTML = `<div>${v.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = v[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = v[0]?.cmd.trim();
window.__list = modelDom.querySelectorAll('.cmd-item');
window.__index = 0;
window.__list[window.__index].classList.add('selected');
};
const setPrompt = (v = '') => {
if (v.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${v.trim()}}`);
}
}
     const searchInput = document.querySelector('form textarea');
     // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.
@@ -84,6 +126,35 @@ async function cmdTip() {
         return;
       }
// ------------------ Keyboard scrolling (ArrowUp | ArrowDown) --------------------------
if (event.keyCode === 38 && window.__index > 0) { // ArrowUp
window.__list[window.__index].classList.remove('selected');
window.__index = window.__index - 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
}
if (event.keyCode === 40 && window.__index < window.__list.length - 1) { // ArrowDown
window.__list[window.__index].classList.remove('selected');
window.__index = window.__index + 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
}
const containerHeight = modelDom.offsetHeight;
const itemHeight = window.__list[0].offsetHeight + 1;
const itemTop = window.__list[window.__index].offsetTop;
const itemBottom = itemTop + itemHeight;
if (itemTop < modelDom.scrollTop || itemBottom > modelDom.scrollTop + containerHeight) {
modelDom.scrollTop = itemTop;
}
// ------------------ TAB key replaces `{q}` tag content -------------------------------
       // feat: https://github.com/lencx/ChatGPT/issues/54
       if (event.keyCode === 9 && !window.__CHAT_MODEL_STATUS__) {
         const strGroup = window.__CHAT_MODEL_CMD_PROMPT__.match(/\{([^{}]*)\}/) || [];
@@ -95,38 +166,34 @@ async function cmdTip() {
         event.preventDefault();
       }
-      if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) {
+      if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) { // TAB
         const data = searchInput.value.split('|->');
         if (data[1]?.trim()) {
-          window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${data[1]?.trim()}}`);
+          setPrompt(data[1]);
           window.__CHAT_MODEL_STATUS__ = 2;
         }
         event.preventDefault();
       }
       // input text
-      if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) {
-        console.log('«110» /src/assets/cmd.js ~> ', __CHAT_MODEL_STATUS__);
+      if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) { // TAB
         searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
         modelDom.innerHTML = '';
         delete window.__CHAT_MODEL_STATUS__;
         event.preventDefault();
       }
-      // type in a space to complete the fill
+      // ------------------ type in a space to complete the fill ------------------------------------
       if (event.keyCode === 32) {
         searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
         modelDom.innerHTML = '';
         delete window.__CHAT_MODEL_CMD_PROMPT__;
       }
-      // send
-      if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) {
+      // ------------------ send --------------------------------------------------------------------
+      if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) { // Enter
         const data = searchInput.value.split('|->');
-        if (data[1]?.trim()) {
-          window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${data[1]?.trim()}}`);
-        }
+        setPrompt(data[1]);
         searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
         modelDom.innerHTML = '';
@@ -137,7 +204,7 @@ async function cmdTip() {
       }
     });
-    searchInput.addEventListener('input', (event) => {
+    searchInput.addEventListener('input', () => {
       if (searchInput.value === '') {
         delete window.__CHAT_MODEL_CMD_PROMPT__;
         delete window.__CHAT_MODEL_CMD__;
@@ -154,17 +221,13 @@ async function cmdTip() {
       // all cmd result
       if (query === '/') {
-        modelDom.innerHTML = `<div>${data.map(itemDom).join('')}</div>`;
-        window.__CHAT_MODEL_CMD_PROMPT__ = data[0]?.prompt.trim();
-        window.__CHAT_MODEL_CMD__ = data[0]?.cmd.trim();
+        renderList(data);
         return;
       }
       const result = data.filter(i => new RegExp(query.substring(1)).test(i.cmd));
       if (result.length > 0) {
-        modelDom.innerHTML = `<div>${result.map(itemDom).join('')}</div>`;
-        window.__CHAT_MODEL_CMD_PROMPT__ = result[0]?.prompt.trim();
-        window.__CHAT_MODEL_CMD__ = result[0]?.cmd.trim();
+        renderList(result);
       } else {
         modelDom.innerHTML = '';
         delete window.__CHAT_MODEL_CMD_PROMPT__;

View File

@@ -86,6 +86,10 @@ async function init() {
       }
     }
   });
+  window.__sync_prompts = async function() {
+    await invoke('sync_prompts', { time: Date.now() });
+  }
 }
 if (

View File

@@ -3,6 +3,7 @@
 const buttonOuterHTMLFallback = `<button class="btn flex justify-center gap-2 btn-neutral" id="download-png-button">Try Again</button>`;
 async function init() {
+  if (window.innerWidth < 767) return;
   const chatConf = await invoke('get_chat_conf') || {};
   if (window.buttonsInterval) {
     clearInterval(window.buttonsInterval);
@@ -88,7 +89,9 @@ function addActionsButtons(actionsArea, TryAgainButton) {
   const downloadButton = TryAgainButton.cloneNode(true);
   downloadButton.id = "download-png-button";
   downloadButton.setAttribute("share-ext", "true");
-  downloadButton.innerText = "Generate PNG";
+  // downloadButton.innerText = "Generate PNG";
+  downloadButton.title = "Generate PNG";
+  downloadButton.innerHTML = setIcon('png');
   downloadButton.onclick = () => {
     downloadThread();
   };
@@ -96,7 +99,9 @@ function addActionsButtons(actionsArea, TryAgainButton) {
   const downloadPdfButton = TryAgainButton.cloneNode(true);
   downloadPdfButton.id = "download-pdf-button";
   downloadButton.setAttribute("share-ext", "true");
-  downloadPdfButton.innerText = "Download PDF";
+  // downloadPdfButton.innerText = "Download PDF";
+  downloadPdfButton.title = "Download PDF";
+  downloadPdfButton.innerHTML = setIcon('pdf');
   downloadPdfButton.onclick = () => {
     downloadThread({ as: Format.PDF });
   };
@@ -104,7 +109,9 @@ function addActionsButtons(actionsArea, TryAgainButton) {
   const exportHtml = TryAgainButton.cloneNode(true);
   exportHtml.id = "download-html-button";
   downloadButton.setAttribute("share-ext", "true");
-  exportHtml.innerText = "Share Link";
+  // exportHtml.innerText = "Share Link";
+  exportHtml.title = "Share Link";
+  exportHtml.innerHTML = setIcon('link');
   exportHtml.onclick = () => {
     sendRequest();
   };
@@ -269,4 +276,12 @@ if (
   init();
 } else {
   document.addEventListener("DOMContentLoaded", init);
 }
function setIcon(type) {
return {
link: `<svg class="chatappico" viewBox="0 0 1024 1024"><path d="M1007.382 379.672L655.374 75.702C624.562 49.092 576 70.694 576 112.03v160.106C254.742 275.814 0 340.2 0 644.652c0 122.882 79.162 244.618 166.666 308.264 27.306 19.862 66.222-5.066 56.154-37.262C132.132 625.628 265.834 548.632 576 544.17V720c0 41.4 48.6 62.906 79.374 36.328l352.008-304c22.142-19.124 22.172-53.506 0-72.656z" p-id="8506" fill="currentColor"></path></svg>`,
png: `<svg class="chatappico" viewBox="0 0 1070 1024"><path d="M981.783273 0H85.224727C38.353455 0 0 35.374545 0 83.083636v844.893091c0 47.616 38.353455 86.574545 85.178182 86.574546h903.633454c46.917818 0 81.733818-38.958545 81.733819-86.574546V83.083636C1070.592 35.374545 1028.701091 0 981.783273 0zM335.825455 135.912727c74.193455 0 134.330182 60.974545 134.330181 136.285091 0 75.170909-60.136727 136.192-134.330181 136.192-74.286545 0-134.516364-61.021091-134.516364-136.192 0-75.264 60.229818-136.285091 134.516364-136.285091z m-161.512728 745.937455a41.890909 41.890909 0 0 1-27.648-10.379637 43.752727 43.752727 0 0 1-4.654545-61.067636l198.097454-255.162182a42.123636 42.123636 0 0 1 57.716364-6.702545l116.549818 128.139636 286.906182-352.814545c14.615273-18.711273 90.251636-106.775273 135.866182-6.935273 0.093091-0.093091 0.093091 112.965818 0.232727 247.761455 0.093091 140.8 0.093091 317.067636 0.093091 317.067636-1.024-0.093091-762.740364 0.093091-763.112727 0.093091z" fill="currentColor"></path></svg>`,
pdf: `<svg class="chatappico pdf" viewBox="0 0 1024 1024"><path d="M821.457602 118.382249H205.725895c-48.378584 0-87.959995 39.583368-87.959996 87.963909v615.731707c0 48.378584 39.581411 87.959995 87.959996 87.959996h615.733664c48.380541 0 87.961952-39.581411 87.961952-87.959996V206.346158c-0.001957-48.378584-39.583368-87.963909-87.963909-87.963909zM493.962468 457.544987c-10.112054 32.545237-21.72487 82.872662-38.806571 124.248336-8.806957 22.378397-8.380404 18.480717-15.001764 32.609808l5.71738-1.851007c58.760658-16.443827 99.901532-20.519564 138.162194-27.561607-7.67796-6.06371-14.350194-10.751884-19.631237-15.586807-26.287817-29.101504-35.464584-34.570387-70.440002-111.862636v0.003913z m288.36767 186.413594c-7.476424 8.356924-20.670227 13.191847-40.019704 13.191847-33.427694 0-63.808858-9.229597-107.79277-31.660824-75.648648 8.356924-156.097 17.214754-201.399704 31.729308-2.199293 0.876587-4.832967 1.759043-7.916674 3.077836-54.536215 93.237125-95.031389 132.767663-130.621199 131.19646-11.286054-0.49895-27.694661-7.044-32.973748-10.11988l-6.52157-6.196764-2.29517-4.353583c-3.07588-7.91863-3.954423-15.395054-2.197337-23.751977 4.838837-23.309771 29.907651-60.251638 82.686779-93.237126 8.356924-6.159587 27.430511-15.897917 45.020944-24.25484 13.311204-21.177004 19.45905-34.744531 36.341171-72.259702 19.102937-45.324228 36.505531-99.492589 47.500041-138.191543v-0.44025c-16.267727-53.219378-25.945401-89.310095-9.67376-147.80856 3.958337-16.71189 18.46702-33.864031 34.748444-33.864031h10.552304c10.115967 0 19.791684 3.520043 26.829814 10.552304 29.029107 29.031064 15.39114 103.824649 0.8805 162.323113-0.8805 2.63563-1.322707 4.832967-1.761 6.153717 17.59239 49.697378 45.400538 98.774492 73.108895 121.647926 11.436717 8.791304 22.638634 18.899444 36.71098 26.814161 19.791684-2.20125 37.517128-4.11487 55.547812-4.11487 54.540128 0 87.525615 9.67963 100.279169 30.351814 4.400543 7.034217 6.595923 15.389184 5.281043 24.1844-0.44025 10.996467-4.39663 21.112434-12.31526 29.031064z m-27.796407-36.748157c-4.394673-4.398587-17.024957-16.936907-78.601259-16.936907-3.073923 0-10.622744-0.784623-14.57521 3.612007 32.104987 14.072347 62.830525 24.757704 83.058545 24.757703 3.083707 0 5.72325-0.442207 8.356923-0.876586h1.759044c2.20125-0.8805 3.520043-1.324663 3.960293-5.71738-0.87463-1.324663-1.757087-3.083707-3.958336-4.838837z m-387.124553 63.041845c-9.237424 5.27713-16.71189 10.112054-21.112433 13.634053-31.226444 28.586901-51.018128 57.616008-53.217422 74.331812 19.789727-6.59788 45.737084-35.626987 74.329855-87.961952v-0.003913z m125.574957-297.822284l2.197336-1.761c3.079793-14.072347 5.232127-29.189554 7.87167-38.869184l1.318794-7.036174c4.39663-25.070771 2.71781-39.720334-4.76057-50.272637l-6.59788-2.20125a57.381208 57.381208 0 0 0-3.079794 5.27713c-7.474467 18.47289-7.063567 55.283661 3.0524 94.865072l-0.001956-0.001957z" fill="currentColor"></path></svg>`
}[type];
}

View File

@@ -1,5 +1,6 @@
 use crate::utils::{chat_root, create_file, exists};
 use anyhow::Result;
+use log::info;
 use serde_json::Value;
 use std::{collections::BTreeMap, fs, path::PathBuf, sync::Mutex};
 use tauri::{Manager, Theme};
@@ -7,16 +8,19 @@ use tauri::{Manager, Theme};
 #[cfg(target_os = "macos")]
 use tauri::TitleBarStyle;
-// pub const USER_AGENT: &str = "5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36";
-// pub const PHONE_USER_AGENT: &str = "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1";
+// pub const USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.1 Safari/605.1.15";
+// pub const PHONE_USER_AGENT: &str = "Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.0 Mobile/15E148 Safari/604.1";
 pub const ISSUES_URL: &str = "https://github.com/lencx/ChatGPT/issues";
 pub const UPDATE_LOG_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md";
 pub const AWESOME_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/AWESOME.md";
+pub const GITHUB_PROMPTS_CSV_URL: &str =
+    "https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv";
 pub const DEFAULT_CHAT_CONF: &str = r#"{
     "stay_on_top": false,
     "theme": "Light",
     "titlebar": true,
+    "global_shortcut": "",
     "hide_dock_icon": false,
     "default_origin": "https://chat.openai.com",
     "origin": "https://chat.openai.com",
@@ -27,6 +31,7 @@ pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{
     "stay_on_top": false,
     "theme": "Light",
     "titlebar": false,
+    "global_shortcut": "",
     "hide_dock_icon": false,
     "default_origin": "https://chat.openai.com",
     "origin": "https://chat.openai.com",
@@ -60,12 +65,14 @@ pub struct ChatConfJson {
     pub origin: String,
     pub ua_window: String,
     pub ua_tray: String,
+    pub global_shortcut: Option<String>,
 }
 impl ChatConfJson {
     /// init chat.conf.json
     /// path: ~/.chatgpt/chat.conf.json
     pub fn init() -> PathBuf {
+        info!("chat_conf_init");
         let conf_file = ChatConfJson::conf_path();
         let content = if cfg!(target_os = "macos") {
             DEFAULT_CHAT_CONF_MAC

View File

@@ -15,7 +15,8 @@ use tauri_plugin_log::{
     LogTarget, LoggerBuilder,
 };
-fn main() {
+#[tokio::main]
+async fn main() {
     ChatConfJson::init();
     // If the file does not exist, creating the file will block menu synchronization
     utils::create_chatgpt_prompts();
@@ -61,6 +62,8 @@ fn main() {
             cmd::open_file,
             cmd::get_chat_model_cmd,
             cmd::parse_prompt,
+            cmd::sync_prompts,
+            cmd::sync_user_prompts,
             cmd::window_reload,
             cmd::cmd_list,
             fs_extra::metadata,
@@ -76,7 +79,7 @@ fn main() {
             if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() {
                 let win = event.window();
                 if win.label() == "main" {
-                    win.hide().unwrap();
+                    win.close().unwrap();
                 } else {
                     // TODO: https://github.com/tauri-apps/tauri/issues/3084
                     // event.window().hide().unwrap();

View File

@@ -1,23 +1,25 @@
 use anyhow::Result;
 use log::info;
+use regex::Regex;
+use serde_json::Value;
 use std::{
+    collections::HashMap,
     fs::{self, File},
     path::{Path, PathBuf},
     process::Command,
 };
-use tauri::Manager;
-// use tauri::utils::config::Config;
+use tauri::{utils::config::Config, Manager};
 pub fn chat_root() -> PathBuf {
     tauri::api::path::home_dir().unwrap().join(".chatgpt")
 }
-// pub fn get_tauri_conf() -> Option<Config> {
-//     let config_file = include_str!("../tauri.conf.json");
-//     let config: Config =
-//         serde_json::from_str(config_file).expect("failed to parse tauri.conf.json");
-//     Some(config)
-// }
+pub fn get_tauri_conf() -> Option<Config> {
+    let config_file = include_str!("../tauri.conf.json");
+    let config: Config =
+        serde_json::from_str(config_file).expect("failed to parse tauri.conf.json");
+    Some(config)
+}
 pub fn exists(path: &Path) -> bool {
     Path::new(path).exists()
@@ -89,3 +91,40 @@ pub fn clear_conf(app: &tauri::AppHandle) {
         },
     );
 }
pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
match v {
Value::Object(m) => {
let mut m = m.clone();
for (k, v) in fields {
m.insert(k.clone(), v.clone());
}
Value::Object(m)
}
v => v.clone(),
}
}
pub fn gen_cmd(name: String) -> String {
let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
re.replace_all(&name, "_").to_lowercase()
}
pub async fn get_data(
url: &str,
app: Option<&tauri::AppHandle>,
) -> Result<Option<String>, reqwest::Error> {
let res = reqwest::get(url).await?;
let is_ok = res.status() == 200;
let body = res.text().await?;
if is_ok {
Ok(Some(body))
} else {
info!("chatgpt_http_error: {}", body);
if let Some(v) = app {
tauri::api::dialog::message(v.get_window("core").as_ref(), "ChatGPT HTTP", body);
}
Ok(None)
}
}

View File

@@ -7,24 +7,18 @@
   },
   "package": {
     "productName": "ChatGPT",
-    "version": "0.6.4"
+    "version": "0.7.4"
   },
   "tauri": {
     "allowlist": {
       "all": true,
-      "http": {
-        "all": true,
-        "scope": [
-          "https://**",
-          "http://**"
-        ]
+      "globalShortcut": {
+        "all": true
       },
       "fs": {
         "all": true,
         "scope": [
-          "$HOME/.chatgpt/*",
-          "$HOME/.chatgpt/**",
-          "$HOME/.chatgpt/cache_model/*"
+          "$HOME/.chatgpt/**"
         ]
       }
     },
@@ -79,18 +73,6 @@
       "https://lencx.github.io/ChatGPT/install.json"
     ],
     "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK"
-    },
-    "windows": [
-      {
-        "label": "main",
-        "url": "index.html",
-        "title": "ChatGPT",
-        "visible": false,
-        "width": 800,
-        "height": 600,
-        "minWidth": 800,
-        "minHeight": 600
-      }
-    ]
+    }
   }
 }

View File

@@ -18,8 +18,6 @@ export default function useData(oData: any[]) {
   const opInit = (val: any[] = []) => {
     if (!val || !Array.isArray(val)) return;
-    console.log('«20» /src/hooks/useData.ts ~> ', val);
     const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
     setData(nData);
   };

src/hooks/useEvent.ts

@@ -1,34 +0,0 @@
-import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
-import useInit from '@/hooks/useInit';
-import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
-import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
-
-export default function useEvent() {
-  const { modelSet } = useChatModel('sync_prompts');
-  const { modelCacheSet } = useCacheModel();
-
-  // Using `emit` and `listen` will be triggered multiple times in development mode.
-  // So here we use `eval` to call `__sync_prompt`
-  useInit(() => {
-    (window as any).__sync_prompts = async () => {
-      const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
-        method: 'GET',
-        responseType: http.ResponseType.Text,
-      });
-      const data = (res.data || '') as string;
-      if (res.ok) {
-        const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
-        const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-        const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
-        await modelCacheSet(fmtList, file);
-        modelSet({
-          id: 'chatgpt_prompts',
-          last_updated: Date.now(),
-        });
-        dialog.message('ChatGPT Prompts data has been synchronized!');
-      } else {
-        dialog.message('ChatGPT Prompts data sync failed, please try again!');
-      }
-    }
-  })
-}

src/main.scss

@@ -45,6 +45,12 @@ html, body {
     }
   }

+  .chat-table-tip {
+    > span {
+      line-height: 16px;
+    }
+  }
+
   .chat-sync-path {
     font-size: 12px;
     font-weight: 500;
@@ -52,6 +58,14 @@ html, body {
     margin-bottom: 5px;
     line-height: 16px;

+    > div {
+      max-width: 400px;
+      overflow: hidden;
+      text-overflow: ellipsis;
+      white-space: nowrap;
+      color: #2a2a2a;
+    }
+
     span {
       display: inline-block;
       // background-color: #d8d8d8;

src/main.tsx

@@ -2,23 +2,15 @@ import { StrictMode, Suspense } from 'react';
 import { BrowserRouter } from 'react-router-dom';
 import ReactDOM from 'react-dom/client';
-import useEvent from '@/hooks/useEvent';
 import Layout from '@/layout';

 import './main.scss';

-const App = () => {
-  useEvent();
-  return (
-    <BrowserRouter>
-      <Layout/>
-    </BrowserRouter>
-  );
-}
-
 ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
   <StrictMode>
     <Suspense fallback={null}>
-      <App />
+      <BrowserRouter>
+        <Layout/>
+      </BrowserRouter>
     </Suspense>
   </StrictMode>
 );

src/utils.ts

@@ -1,4 +1,4 @@
-import { readTextFile, writeTextFile, exists, createDir, BaseDirectory } from '@tauri-apps/api/fs';
+import { readTextFile, writeTextFile, exists, createDir } from '@tauri-apps/api/fs';
 import { homeDir, join, dirname } from '@tauri-apps/api/path';
 import dayjs from 'dayjs';
@@ -20,10 +20,6 @@ export const chatModelPath = async (): Promise<string> => {
   return join(await chatRoot(), CHAT_MODEL_JSON);
 }

-// export const chatModelSyncPath = async (): Promise<string> => {
-//   return join(await chatRoot(), CHAT_MODEL_SYNC_JSON);
-// }
-
 export const chatPromptsPath = async (): Promise<string> => {
   return join(await chatRoot(), CHAT_PROMPTS_CSV);
 }
@@ -32,10 +28,16 @@ type readJSONOpts = { defaultVal?: Record<string, any>, isRoot?: boolean, isList
 export const readJSON = async (path: string, opts: readJSONOpts = {}) => {
   const { defaultVal = {}, isRoot = false, isList = false } = opts;
   const root = await chatRoot();
-  const file = await join(isRoot ? '' : root, path);
+  let file = path;
+  if (!isRoot) {
+    file = await join(root, path);
+  }

   if (!await exists(file)) {
-    await createDir(await dirname(file), { recursive: true });
+    if (await dirname(file) !== root) {
+      await createDir(await dirname(file), { recursive: true });
+    }
     await writeTextFile(file, isList ? '[]' : JSON.stringify({
       name: 'ChatGPT',
       link: 'https://github.com/lencx/ChatGPT',
@@ -54,7 +56,11 @@ type writeJSONOpts = { dir?: string, isRoot?: boolean };
 export const writeJSON = async (path: string, data: Record<string, any>, opts: writeJSONOpts = {}) => {
   const { isRoot = false } = opts;
   const root = await chatRoot();
-  const file = await join(isRoot ? '' : root, path);
+  let file = path;
+  if (!isRoot) {
+    file = await join(root, path);
+  }

   if (isRoot && !await exists(await dirname(file))) {
     await createDir(await dirname(file), { recursive: true });

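Note: a minimal usage sketch (not part of the diff) of how the reworked path handling behaves, assuming the readJSON/writeJSON helpers shown above are exported from src/utils.ts; the file names here are hypothetical.

// Relative paths are joined to the chat root (~/.chatgpt); with isRoot the
// caller passes a full path and no join happens, matching the change above.
import { join } from '@tauri-apps/api/path';
import { chatRoot, readJSON, writeJSON } from '@/utils';

export async function pathDemo() {
  // resolves to `${await chatRoot()}/example.json`, created with defaults if missing
  const conf = await readJSON('example.json', { defaultVal: {} });

  // isRoot: the path is used as-is, so the caller controls the directory
  const cacheFile = await join(await chatRoot(), 'cache_model', 'example_sync.json');
  await writeJSON(cacheFile, { synced_at: Date.now() }, { isRoot: true });
  return conf;
}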
src/view/General.tsx

@@ -1,36 +1,54 @@
 import { useEffect, useState } from 'react';
 import { Form, Radio, Switch, Input, Button, Space, message, Tooltip } from 'antd';
 import { QuestionCircleOutlined } from '@ant-design/icons';
-import { invoke } from '@tauri-apps/api';
+import { invoke, shell, path } from '@tauri-apps/api';
 import { platform } from '@tauri-apps/api/os';
 import { ask } from '@tauri-apps/api/dialog';
 import { relaunch } from '@tauri-apps/api/process';
 import { clone, omit, isEqual } from 'lodash';
-import { DISABLE_AUTO_COMPLETE } from '@/utils';
+import useInit from '@/hooks/useInit';
+import { DISABLE_AUTO_COMPLETE, chatRoot } from '@/utils';

 const OriginLabel = ({ url }: { url: string }) => {
   return (
     <span>
-      Switch Origin <Tooltip title={`Default: ${url}`}><QuestionCircleOutlined /></Tooltip>
+      Switch Origin <Tooltip title={`Default: ${url}`}><QuestionCircleOutlined style={{ color: '#1677ff' }} /></Tooltip>
     </span>
   )
 }

+const GlobalShortcut = () => {
+  return (
+    <div>
+      Global Shortcut
+      {' '}
+      <Tooltip title={(
+        <div>
+          <div>Shortcut definition, modifiers and key separated by "+" e.g. CmdOrControl+Q</div>
+          <div style={{ margin: '10px 0'}}>If empty, the shortcut is disabled.</div>
+          <a href="https://tauri.app/v1/api/js/globalshortcut" target="_blank">https://tauri.app/v1/api/js/globalshortcut</a>
+        </div>
+      )}>
+        <QuestionCircleOutlined style={{ color: '#1677ff' }} />
+      </Tooltip>
+    </div>
+  )
+}
+
 export default function General() {
   const [form] = Form.useForm();
+  const [jsonPath, setJsonPath] = useState('');
   const [platformInfo, setPlatform] = useState<string>('');
   const [chatConf, setChatConf] = useState<any>(null);

-  const init = async () => {
+  useInit(async () => {
+    setJsonPath(await path.join(await chatRoot(), 'chat.conf.json'));
     setPlatform(await platform());
     const chatData = await invoke('get_chat_conf');
     setChatConf(chatData);
-  }
-
-  useEffect(() => {
-    init();
-  }, [])
+  });

   useEffect(() => {
     form.setFieldsValue(clone(chatConf));
@@ -55,44 +73,54 @@ export default function General() {
   };

   return (
-    <Form
-      form={form}
-      style={{ maxWidth: 500 }}
-      onFinish={onFinish}
-      labelCol={{ span: 8 }}
-      wrapperCol={{ span: 15, offset: 1 }}
-    >
-      <Form.Item label="Theme" name="theme">
-        <Radio.Group>
-          <Radio value="Light">Light</Radio>
-          <Radio value="Dark">Dark</Radio>
-        </Radio.Group>
-      </Form.Item>
-      <Form.Item label="Stay On Top" name="stay_on_top" valuePropName="checked">
-        <Switch />
-      </Form.Item>
-      {platformInfo === 'darwin' && (
-        <Form.Item label="Titlebar" name="titlebar" valuePropName="checked">
-          <Switch />
-        </Form.Item>
-      )}
-      <Form.Item label={<OriginLabel url={chatConf?.default_origin} />} name="origin">
-        <Input placeholder="https://chat.openai.com" {...DISABLE_AUTO_COMPLETE} />
-      </Form.Item>
-      <Form.Item label="User Agent (Window)" name="ua_window">
-        <Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
-      </Form.Item>
-      <Form.Item label="User Agent (SystemTray)" name="ua_tray">
-        <Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
-      </Form.Item>
-      <Form.Item>
-        <Space size={20}>
-          <Button onClick={onCancel}>Cancel</Button>
-          <Button type="primary" htmlType="submit">
-            Submit
-          </Button>
-        </Space>
-      </Form.Item>
-    </Form>
+    <>
+      <div className="chat-table-tip">
+        <div className="chat-sync-path">
+          <div>PATH: <a onClick={() => shell.open(jsonPath)} title={jsonPath}>{jsonPath}</a></div>
+        </div>
+      </div>
+      <Form
+        form={form}
+        style={{ maxWidth: 500 }}
+        onFinish={onFinish}
+        labelCol={{ span: 8 }}
+        wrapperCol={{ span: 15, offset: 1 }}
+      >
+        <Form.Item label="Theme" name="theme">
+          <Radio.Group>
+            <Radio value="Light">Light</Radio>
+            <Radio value="Dark">Dark</Radio>
+          </Radio.Group>
+        </Form.Item>
+        <Form.Item label="Stay On Top" name="stay_on_top" valuePropName="checked">
+          <Switch />
+        </Form.Item>
+        <Form.Item label={<GlobalShortcut />} name="global_shortcut">
+          <Input placeholder="CmdOrCtrl+Shift+O" {...DISABLE_AUTO_COMPLETE} />
+        </Form.Item>
+        {platformInfo === 'darwin' && (
+          <Form.Item label="Titlebar" name="titlebar" valuePropName="checked">
+            <Switch />
+          </Form.Item>
+        )}
+        <Form.Item label={<OriginLabel url={chatConf?.default_origin} />} name="origin">
+          <Input placeholder="https://chat.openai.com" {...DISABLE_AUTO_COMPLETE} />
+        </Form.Item>
+        <Form.Item label="User Agent (Window)" name="ua_window">
+          <Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
+        </Form.Item>
+        <Form.Item label="User Agent (SystemTray)" name="ua_tray">
+          <Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
+        </Form.Item>
+        <Form.Item>
+          <Space size={20}>
+            <Button onClick={onCancel}>Cancel</Button>
+            <Button type="primary" htmlType="submit">
+              Submit
+            </Button>
+          </Space>
+        </Form.Item>
+      </Form>
+    </>
   )
 }

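Note: the new "Global Shortcut" field stores an accelerator string such as "CmdOrCtrl+Shift+O" in chat.conf.json. The sketch below is not part of the diff; it only illustrates the accelerator format accepted by Tauri's global-shortcut JS API, which the new "globalShortcut" allowlist entry permits from the webview. The app itself applies the configured value in its Rust backend.

// Re-register an accelerator like "CmdOrControl+Q"; an empty value means the shortcut is disabled.
import { register, unregister, isRegistered } from '@tauri-apps/api/globalShortcut';

export async function applyShortcut(accelerator: string, onTrigger: () => void) {
  if (!accelerator) return;
  if (await isRegistered(accelerator)) {
    await unregister(accelerator);
  }
  await register(accelerator, onTrigger);
}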

@@ -8,6 +8,7 @@ import useInit from '@/hooks/useInit';

 interface SyncFormProps {
   record?: Record<string|symbol, any> | null;
+  type: string;
 }

 const initFormValue = {
@@ -17,7 +18,8 @@
   prompt: '',
 };

-const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }, ref) => {
+const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record, type }, ref) => {
+  const isDisabled = type === 'edit';
   const [form] = Form.useForm();
   useImperativeHandle(ref, () => ({ form }));
   const [root, setRoot] = useState('');
@@ -34,7 +36,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
   const pathOptions = (
     <Form.Item noStyle name="protocol" initialValue="https">
-      <Select>
+      <Select disabled={isDisabled}>
         <Select.Option value="local">{root}</Select.Option>
         <Select.Option value="http">http://</Select.Option>
         <Select.Option value="https">https://</Select.Option>
@@ -43,7 +45,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
   );
   const extOptions = (
     <Form.Item noStyle name="ext" initialValue="json">
-      <Select>
+      <Select disabled={isDisabled}>
         <Select.Option value="csv">.csv</Select.Option>
         <Select.Option value="json">.json</Select.Option>
       </Select>
@@ -90,8 +92,13 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
         label="PATH"
         name="path"
         rules={[{ required: true, message: 'Please input path!' }]}
       >
-        <Input placeholder="YOUR_PATH" addonBefore={pathOptions} addonAfter={extOptions} {...DISABLE_AUTO_COMPLETE} />
+        <Input
+          placeholder="YOUR_PATH"
+          addonBefore={pathOptions}
+          addonAfter={extOptions}
+          {...DISABLE_AUTO_COMPLETE}
+        />
       </Form.Item>
       <Form.Item style={{ display: 'none' }} name="id" initialValue={v4().replace(/-/g, '')}><input /></Form.Item>
     </Form>


@@ -34,7 +34,7 @@ export const syncColumns = () => [
     key: 'last_updated',
     width: 140,
     render: (v: number) => (
-      <div style={{ textAlign: 'center' }}>
+      <div>
         <HistoryOutlined style={{ marginRight: 5, color: v ? '#52c41a' : '#ff4d4f' }} />
         { v ? fmtDate(v) : ''}
       </div>
@@ -47,7 +47,15 @@
     render: (_: any, row: any, actions: any) => {
       return (
         <Space>
-          <a onClick={() => actions.setRecord(row, 'sync')}>Sync</a>
+          <Popconfirm
+            overlayStyle={{ width: 250 }}
+            title="Sync will overwrite the previous data, confirm to sync?"
+            onConfirm={() => actions.setRecord(row, 'sync')}
+            okText="Yes"
+            cancelText="No"
+          >
+            <a>Sync</a>
+          </Popconfirm>
           {row.last_updated && <Link to={`${row.id}`} state={row}>View</Link>}
           <a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
           <Popconfirm


@@ -1,6 +1,6 @@
 import { useState, useRef, useEffect } from 'react';
 import { Table, Modal, Button, message } from 'antd';
-import { invoke, http, path, fs } from '@tauri-apps/api';
+import { invoke, path, fs } from '@tauri-apps/api';

 import useData from '@/hooks/useData';
 import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
@@ -10,7 +10,7 @@ import { CHAT_MODEL_JSON, chatRoot, readJSON, genCmd } from '@/utils';
 import { syncColumns, getPath } from './config';
 import SyncForm from './Form';

-const setTag = (data: Record<string, any>[]) => data.map((i) => ({ ...i, tags: ['user-sync'], enable: true }))
+const fmtData = (data: Record<string, any>[] = []) => (Array.isArray(data) ? data : []).map((i) => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), tags: ['user-sync'], enable: true }));

 export default function SyncCustom() {
   const [isVisible, setVisible] = useState(false);
@@ -34,7 +34,9 @@
     if (!opInfo.opType) return;
     if (opInfo.opType === 'sync') {
       const filename = `${opInfo?.opRecord?.id}.json`;
-      handleSync(filename).then(() => {
+      handleSync(filename).then((isOk: boolean) => {
+        opInfo.resetRecord();
+        if (!isOk) return;
         const data = opReplace(opInfo?.opRecord?.[opSafeKey], { ...opInfo?.opRecord, last_updated: Date.now() });
         modelSet(data);
         opInfo.resetRecord();
@@ -44,9 +46,16 @@
       setVisible(true);
     }
     if (['delete'].includes(opInfo.opType)) {
-      const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
-      modelSet(data);
-      opInfo.resetRecord();
+      (async () => {
+        try {
+          const file = await path.join(await chatRoot(), 'cache_model', `${opInfo?.opRecord?.id}.json`);
+          await fs.removeFile(file);
+        } catch(e) {}
+        const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
+        modelSet(data);
+        opInfo.resetRecord();
+        modelCacheCmd();
+      })();
     }
   }, [opInfo.opType, formRef]);
@@ -58,40 +67,30 @@
     // https or http
     if (/^http/.test(record?.protocol)) {
-      const res = await http.fetch(filePath, {
-        method: 'GET',
-        responseType: isJson ? 1 : 2,
-      });
-      if (res.ok) {
-        if (isJson) {
-          // parse json
-          await modelCacheSet(setTag(Array.isArray(res?.data) ? res?.data : []), file);
-        } else {
-          // parse csv
-          const list: Record<string, string>[] = await invoke('parse_prompt', { data: res?.data });
-          const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
-          await modelCacheSet(fmtList, file);
-        }
+      const data = await invoke('sync_user_prompts', { url: filePath, dataType: record?.ext });
+      if (data) {
+        await modelCacheSet(data as [], file);
         await modelCacheCmd();
         message.success('ChatGPT Prompts data has been synchronized!');
+        return true;
       } else {
         message.error('ChatGPT Prompts data sync failed, please try again!');
+        return false;
       }
-      return;
     }

     // local
     if (isJson) {
       // parse json
       const data = await readJSON(filePath, { isRoot: true });
-      await modelCacheSet(setTag(Array.isArray(data) ? data : []), file);
+      await modelCacheSet(fmtData(data), file);
     } else {
       // parse csv
       const data = await fs.readTextFile(filePath);
       const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-      const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
-      await modelCacheSet(fmtList, file);
+      await modelCacheSet(fmtData(list), file);
     }
     await modelCacheCmd();
+    return true;
   };

   const handleOk = () => {
@@ -128,12 +127,12 @@
       <Modal
         open={isVisible}
         onCancel={hide}
-        title="Model PATH"
+        title="Sync PATH"
         onOk={handleOk}
         destroyOnClose
         maskClosable={false}
       >
-        <SyncForm ref={formRef} record={opInfo?.opRecord} />
+        <SyncForm ref={formRef} record={opInfo?.opRecord} type={opInfo.opType} />
       </Modal>
     </div>
   )


@@ -1,13 +1,13 @@
 import { useEffect, useState } from 'react';
-import { Table, Button, message, Popconfirm } from 'antd';
-import { invoke, http, path, shell } from '@tauri-apps/api';
+import { Table, Button, Popconfirm } from 'antd';
+import { invoke, path, shell } from '@tauri-apps/api';

 import useInit from '@/hooks/useInit';
 import useData from '@/hooks/useData';
 import useColumns from '@/hooks/useColumns';
 import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
 import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
-import { fmtDate, chatRoot, GITHUB_PROMPTS_CSV_URL, genCmd } from '@/utils';
+import { fmtDate, chatRoot } from '@/utils';
 import { syncColumns } from './config';
 import './index.scss';
@@ -33,24 +33,13 @@
   }, [modelCacheJson.length]);

   const handleSync = async () => {
-    const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
-      method: 'GET',
-      responseType: http.ResponseType.Text,
-    });
-    const data = (res.data || '') as string;
-    if (res.ok) {
-      // const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
-      const list: Record<string, string>[] = await invoke('parse_prompt', { data });
-      const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
-      await modelCacheSet(fmtList);
-      opInit(fmtList);
+    const data = await invoke('sync_prompts', { time: Date.now() });
+    if (data) {
+      opInit(data as any[]);
       modelSet({
         id: 'chatgpt_prompts',
         last_updated: Date.now(),
       });
-      message.success('ChatGPT Prompts data has been synchronized!');
-    } else {
-      message.error('ChatGPT Prompts data sync failed, please try again!');
     }
   };
@@ -69,6 +58,16 @@
   return (
     <div>
       <div className="chat-table-btns">
+        <Popconfirm
+          overlayStyle={{ width: 250 }}
+          title="Sync will overwrite the previous data, confirm to sync?"
+          placement="topLeft"
+          onConfirm={handleSync}
+          okText="Yes"
+          cancelText="No"
+        >
+          <Button type="primary">Sync</Button>
+        </Popconfirm>
         <div>
           {selectedItems.length > 0 && (
             <>
@@ -78,15 +77,6 @@
             </>
           )}
         </div>
-        <Popconfirm
-          title={<span>Data sync will enable all prompts,<br/>are you sure you want to sync?</span>}
-          placement="topLeft"
-          onConfirm={handleSync}
-          okText="Yes"
-          cancelText="No"
-        >
-          <Button type="primary">Sync</Button>
-        </Popconfirm>
       </div>
       <div className="chat-table-tip">
         <div className="chat-sync-path">


@@ -10,7 +10,7 @@ export const syncColumns = () => [
     // width: 120,
     key: 'cmd',
     render: (_: string, row: Record<string, string>) => (
-      <Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
+      <Tag color="#2a2a2a">/{row.cmd ? row.cmd : genCmd(row.act)}</Tag>
     ),
   },
   {
@@ -24,7 +24,9 @@
     dataIndex: 'tags',
     key: 'tags',
     // width: 150,
-    render: () => <Tag>chatgpt-prompts</Tag>,
+    render: (v: string[]) => (
+      <span className="chat-prompts-tags">{v?.map(i => <Tag key={i}>{i}</Tag>)}</span>
+    ),
   },
   {
     title: 'Enable',