Compare commits

...

102 Commits

Author SHA1 Message Date
lencx
dc769235a7 v0.7.4 2022-12-31 20:11:21 +08:00
lencx
52cc029b01 readme 2022-12-31 20:11:15 +08:00
lencx
39dc007513 readme 2022-12-31 20:08:11 +08:00
lencx
ba1fe9a603 refactor: global shortcut 2022-12-31 20:07:09 +08:00
lencx
e1f8030009 refactor: prompts sync 2022-12-31 18:01:31 +08:00
lencx
9a392a71f6 chore: conf path 2022-12-31 12:43:47 +08:00
lencx
dc0c78fee2 fix: customize global shortcuts (#108) 2022-12-30 23:46:30 +08:00
lencx
b3bd54ce81 Merge pull request #101 from lencx/dev 2022-12-29 21:31:43 +08:00
lencx
ba21fa85d2 v0.7.3 2022-12-29 21:06:40 +08:00
lencx
2ab35bb925 readme 2022-12-29 21:06:29 +08:00
lencx
9cacad0120 chore: optim style 2022-12-29 21:05:21 +08:00
lencx
f1fa859961 chore: optim 2022-12-29 19:29:33 +08:00
lencx
9a9fb24de8 chore: icon & global shortcuts 2022-12-29 01:42:00 +08:00
lencx
3424666ec9 chore: tauri conf 2022-12-29 01:42:00 +08:00
lencx
416bf7064c Merge pull request #97 from Sturlen/main 2022-12-28 18:22:32 +08:00
Sturlen
f5cf3acd3a fix: matching action button colors 2022-12-28 10:47:30 +01:00
lencx
975ffd2d84 Merge pull request #92 from lencx/dev 2022-12-28 03:49:04 +08:00
lencx
145264719f chore: action 2022-12-28 03:35:08 +08:00
lencx
a929376cb2 v0.7.2 2022-12-28 03:34:05 +08:00
lencx
478049e23e fix: windows can't start 2022-12-28 03:33:40 +08:00
lencx
631dee95a7 fix: windows can't start 2022-12-28 03:07:17 +08:00
lencx
c4ff0b4107 Merge pull request #91 from lencx/dev 2022-12-28 01:44:22 +08:00
lencx
bcd350584e v0.7.1 2022-12-28 01:25:49 +08:00
lencx
050045f644 chore: action 2022-12-28 01:25:28 +08:00
lencx
7e9440b45e readme 2022-12-28 01:13:22 +08:00
lencx
cd9c0ac742 fix: windows can't start (#85) 2022-12-28 01:13:00 +08:00
lencx
2d018c4967 fix: tray icon (#87) 2022-12-28 01:12:03 +08:00
lencx
f4d3cc6c8e Merge branch 'main' of github.com:lencx/ChatGPT 2022-12-27 21:33:29 +08:00
xueweiwujxw
cd6cece45e fix(src-tauri/src/app/menu.rs): warning on linux
add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu
2022-12-27 21:29:46 +08:00
lencx
54b5b63f0e v0.7.0 2022-12-27 21:29:11 +08:00
lencx
680f1b01ad readme 2022-12-27 21:29:11 +08:00
lencx
078b0296f5 chore: cmd 2022-12-27 21:29:11 +08:00
lencx
c956758a4a readme 2022-12-27 21:29:11 +08:00
lencx
477120ef3b feat: use the keyboard to select the slash command 2022-12-27 21:29:11 +08:00
lencx
0ee95630ef Merge pull request #86 from xueweiwujxw/menu-warn 2022-12-27 21:15:28 +08:00
xueweiwujxw
fb0319a977 🐞 fix(src-tauri/src/app/menu.rs): fix warning on linux
add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu
2022-12-27 20:50:23 +08:00
lencx
ea1a78abf5 Merge pull request #84 from lencx/dev 2022-12-27 15:31:56 +08:00
lencx
3428e11b85 v0.7.0 2022-12-27 15:15:47 +08:00
lencx
0e0771d0ec readme 2022-12-27 15:14:49 +08:00
lencx
d78e2ad0b3 chore: cmd 2022-12-27 15:14:24 +08:00
lencx
ae31da0b29 readme 2022-12-27 14:54:28 +08:00
lencx
39febe759e feat: use the keyboard to select the slash command 2022-12-27 14:54:28 +08:00
lencx
06ee907199 Merge pull request #81 from beilunyang/patch-1 2022-12-27 12:17:58 +08:00
BeilunYang
f8c1ca5c56 fix(build): mac m1 chip copy/paste 2022-12-27 11:34:16 +08:00
lencx
6da58269bd Merge pull request #79 from weltonrodrigo/patch-1 2022-12-26 10:19:28 +08:00
Welton Rodrigo Torres Nascimento
4bf6c61bee bump homebrew cask to 0.6.10 2022-12-25 20:29:47 -03:00
lencx
a07c85a9cc Merge pull request #78 from lencx/dev 2022-12-25 09:22:15 +08:00
lencx
95a9f12b68 v0.6.10 2022-12-25 08:54:11 +08:00
lencx
252b0f3e15 fix: windows sync 2022-12-25 08:53:58 +08:00
lencx
ed268b32b3 Merge pull request #77 from lencx/dev 2022-12-25 02:27:58 +08:00
lencx
e2319f2fda v0.6.9 2022-12-25 02:01:51 +08:00
lencx
9ec69631f3 readme 2022-12-25 02:01:47 +08:00
lencx
83437ffea7 Merge pull request #73 from lencx/dev 2022-12-24 22:56:30 +08:00
lencx
be9846dc22 readme 2022-12-24 22:31:03 +08:00
lencx
f071e0d6bc v0.6.8 2022-12-24 22:30:28 +08:00
lencx
62a176d20c Merge pull request #72 from lencx/dev 2022-12-24 21:51:21 +08:00
lencx
2f8ff36638 v0.6.7 2022-12-24 21:37:35 +08:00
lencx
fe236e3c66 Merge pull request #71 from lencx/dev 2022-12-24 21:20:41 +08:00
lencx
38e319a215 v0.6.6 2022-12-24 21:06:16 +08:00
lencx
05057d06ad fix: unable to synchronize 2022-12-24 21:05:51 +08:00
lencx
0b0b832130 Merge pull request #70 from lencx/dev 2022-12-24 20:26:23 +08:00
lencx
413d3354c7 v0.6.5 2022-12-24 20:07:07 +08:00
lencx
f1c7fff800 readme 2022-12-24 20:06:56 +08:00
lencx
6fe90dea5b fix: path not allowed on the configured scope (#64) 2022-12-24 20:04:24 +08:00
lencx
25ab2b0368 chore: optim 2022-12-24 20:04:14 +08:00
lencx
94973b1420 Merge pull request #69 from JacobLinCool/patch-1 2022-12-24 01:10:52 +08:00
JacobLinCool
0930cd782a docs: fix cask name in brewfile section 2022-12-24 00:28:15 +08:00
lencx
0733bba4bf Merge pull request #67 from lencx/fix 2022-12-23 23:07:54 +08:00
lencx
bf623365da v0.6.4 2022-12-23 22:51:39 +08:00
lencx
dc88ea9182 fix: path not allowed on the configured scope (#64) 2022-12-23 22:51:23 +08:00
lencx
f411541a76 Merge pull request #66 from lencx/fix 2022-12-23 22:41:09 +08:00
lencx
ca3badc783 v0.6.3 2022-12-23 22:27:46 +08:00
lencx
d7328f576a v0.6.3 2022-12-23 22:27:37 +08:00
lencx
eaf72e2b73 fix: action 2022-12-23 22:23:35 +08:00
lencx
bd2c4fff5c fix: action 2022-12-23 22:10:27 +08:00
lencx
3ca66cf309 v0.6.3 2022-12-23 21:56:45 +08:00
lencx
44c91bc85c fix: path not allowed on the configured scope 2022-12-23 21:56:16 +08:00
lencx
a75ae5e615 Merge pull request #65 from lencx/fix 2022-12-23 21:46:01 +08:00
lencx
8193104853 v0.6.2 2022-12-23 21:31:08 +08:00
lencx
11e07e87d4 fix: path not allowed on the configured scope 2022-12-23 21:30:08 +08:00
lencx
7b8f29534b Merge pull request #63 from lencx/fix 2022-12-23 20:21:34 +08:00
lencx
e4e56c7dbb v0.6.1 2022-12-23 20:04:59 +08:00
lencx
8a79c28398 readme 2022-12-23 20:04:42 +08:00
lencx
a7c4545dbf fix: path not allowed on the configured scope 2022-12-23 20:02:32 +08:00
lencx
d93079f682 Merge pull request #62 from lencx/dev 2022-12-23 19:20:38 +08:00
lencx
44dcdba10f v0.6.0 (#54 #55) 2022-12-23 19:18:40 +08:00
lencx
6e2d395156 v0.6.0 2022-12-23 19:00:26 +08:00
lencx
990aa31437 v0.6.0 2022-12-23 19:00:16 +08:00
lencx
a73d203983 readme 2022-12-23 18:58:10 +08:00
lencx
2a9fba7d27 chore: menu sync 2022-12-23 18:52:56 +08:00
lencx
e4be2bc2f3 readme 2022-12-23 17:51:41 +08:00
lencx
389e00a5e0 chore: sync 2022-12-23 15:27:05 +08:00
lencx
2be560e69a chore: sync record 2022-12-23 02:23:36 +08:00
lencx
6abe7c783e chore: sync 2022-12-23 00:44:08 +08:00
lencx
8319eae519 chore: sync 2022-12-23 00:43:58 +08:00
lencx
921d670f53 feat: the slash command is triggered by the enter key 2022-12-22 22:09:54 +08:00
lencx
39a8d8d297 fix: windows conf (#58) 2022-12-22 09:06:19 +08:00
lencx
2d826c90a0 chore: sync 2022-12-22 08:59:58 +08:00
lencx
d513a50e27 chore: sync 2022-12-21 14:00:42 +08:00
lencx
878bb6c265 feat: optimize pdf size 2022-12-20 20:04:57 +08:00
lencx
69f1968e88 chore: model 2022-12-20 14:49:56 +08:00
lencx
3a0ee7d4d6 Merge pull request #53 from lencx/dev 2022-12-20 01:31:53 +08:00
44 changed files with 1859 additions and 631 deletions

View File

@@ -8,7 +8,7 @@ on:
jobs: jobs:
create-release: create-release:
runs-on: ubuntu-latest runs-on: ubuntu-20.04
outputs: outputs:
RELEASE_UPLOAD_ID: ${{ steps.create_release.outputs.id }} RELEASE_UPLOAD_ID: ${{ steps.create_release.outputs.id }}
@@ -19,7 +19,7 @@ jobs:
shell: bash shell: bash
run: | run: |
echo "using version tag ${GITHUB_REF:10}" echo "using version tag ${GITHUB_REF:10}"
echo ::set-output name=version::"${GITHUB_REF:10}" echo "version=${GITHUB_REF:10}" >> $GITHUB_ENV
- name: Create Release - name: Create Release
id: create_release id: create_release
@@ -27,8 +27,8 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:
tag_name: '${{ steps.get_version.outputs.VERSION }}' tag_name: '${{ env.version }}'
release_name: 'ChatGPT ${{ steps.get_version.outputs.VERSION }}' release_name: 'ChatGPT ${{ env.version }}'
body: 'See the assets to download this version and install.' body: 'See the assets to download this version and install.'
build-tauri: build-tauri:
@@ -36,47 +36,32 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
platform: [macos-latest, ubuntu-latest, windows-latest] platform: [macos-latest, ubuntu-20.04, windows-latest]
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- name: setup node
- name: Setup node uses: actions/setup-node@v3
uses: actions/setup-node@v1
with: with:
node-version: 18 node-version: 16
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
# Rust cache
- uses: Swatinem/rust-cache@v1
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
- name: install dependencies (ubuntu only) - name: install dependencies (ubuntu only)
if: matrix.platform == 'ubuntu-latest' if: matrix.platform == 'ubuntu-20.04'
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y libgtk-3-dev webkit2gtk-4.0 libappindicator3-dev librsvg2-dev patchelf sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
- name: Yarn cache
uses: actions/cache@v2
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install app dependencies and build it - name: Install app dependencies and build it
run: yarn && yarn build:fe run: yarn && yarn build:fe
- name: fix tray icon
if: matrix.platform != 'macos-latest'
run: |
yarn fix:tray
- uses: tauri-apps/tauri-action@v0.3 - uses: tauri-apps/tauri-action@v0.3
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -87,7 +72,7 @@ jobs:
releaseId: ${{ needs.create-release.outputs.RELEASE_UPLOAD_ID }} releaseId: ${{ needs.create-release.outputs.RELEASE_UPLOAD_ID }}
updater: updater:
runs-on: ubuntu-latest runs-on: ubuntu-20.04
needs: [create-release, build-tauri] needs: [create-release, build-tauri]
steps: steps:

View File

@@ -22,9 +22,9 @@
**最新版:** **最新版:**
- `Mac`: [ChatGPT_0.5.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64.dmg) - `Mac`: [ChatGPT_0.7.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64.dmg)
- `Linux`: [chat-gpt_0.5.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/chat-gpt_0.5.1_amd64.deb) - `Linux`: [chat-gpt_0.7.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/chat-gpt_0.7.4_amd64.deb)
- `Windows`: [ChatGPT_0.5.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64_en-US.msi) - `Windows`: [ChatGPT_0.7.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64_en-US.msi)
[其他版本...](https://github.com/lencx/ChatGPT/releases) [其他版本...](https://github.com/lencx/ChatGPT/releases)
@@ -34,18 +34,18 @@
Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_ Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
~~~ sh ```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine brew install --cask chatgpt --no-quarantine
~~~ ```
Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this: Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this:
~~~ rb ```rb
repo = "lencx/chatgpt" repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git" tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true } cask "popcorn-time", args: { "no-quarantine": true }
~~~ ```
## 📢 公告 ## 📢 公告
@@ -58,13 +58,12 @@ cask "popcorn-time", args: { "no-quarantine": true }
![chat cmd](./assets/chat-cmd-1.png) ![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png) ![chat cmd](./assets/chat-cmd-2.png)
数据导入完成后,可以重新启动应用来使配置生效(`Menu -> Preferences -> Restart ChatGPT`)。 <!-- 数据导入完成后,可以重新启动应用来使配置生效(`Menu -> Preferences -> Restart ChatGPT`)。 -->
项目会维护一份常用命令,您也可以直接将 [chat.model.json](https://github.com/lencx/ChatGPT/blob/main/chat.model.json) 复制到你的本地目录 `~/.chatgpt/chat.model.json` 在 ChatGPT 文本输入区域,键入 `/` 开头的字符,则会弹出指令提示,按下空格键,它会默认将命令关联的文本填充到输入区域(注意:如果包含多个指令提示,它只会选择第一个作为填充,你可以持续输入,直到第一个提示命令为你想要时,再按下空格键。或者使用鼠标来点击多条指令中的某一个)。填充完成后,你只需要按下回车键即可。斜杠命令下,使用 TAB 键修改 `{q}` 标签内容(仅支持单个修改 [#54](https://github.com/lencx/ChatGPT/issues/54))。使用键盘 `⇧``⇩`(上下键)来选择斜杠指令
在 ChatGPT 文本输入区域,键入 `/` 开头的字符,则会弹出指令提示,按下空格键,它会默认将命令关联的文本填充到输入区域(注意:如果包含多个指令提示,它只会选择第一个作为填充,你可以持续输入,直到第一个提示命令为你想要时,再按下空格键。或者使用鼠标来点击多条指令中的某一个)。填充完成后,你只需要按下回车键即可。
![chatgpt](assets/chatgpt.gif) ![chatgpt](assets/chatgpt.gif)
![chatgpt-cmd](assets/chatgpt-cmd.gif)
## ✨ 功能概览 ## ✨ 功能概览
@@ -74,6 +73,8 @@ cask "popcorn-time", args: { "no-quarantine": true }
- 丰富的快捷键 - 丰富的快捷键
- 系统托盘悬浮窗 - 系统托盘悬浮窗
- 应用菜单功能强大 - 应用菜单功能强大
- 支持斜杠命令及其配置(可手动配置或从文件同步 [#55](https://github.com/lencx/ChatGPT/issues/55))
- 自定义全局快捷键 ([#108](https://github.com/lencx/ChatGPT/issues/108))
### 菜单项 ### 菜单项
@@ -99,18 +100,68 @@ cask "popcorn-time", args: { "no-quarantine": true }
- `Report Bug`: 报告 BUG 或反馈建议 - `Report Bug`: 报告 BUG 或反馈建议
- `Toggle Developer Tools`: 网站调试工具,调试页面或脚本可能需要 - `Toggle Developer Tools`: 网站调试工具,调试页面或脚本可能需要
## 应用配置
| 平台 | 路径 |
| ------- | ------------------------- |
| Linux | `/home/lencx/.chatgpt` |
| macOS | `/Users/lencx/.chatgpt` |
| Windows | `C:\Users\lencx\.chatgpt` |
- `[.chatgpt]` - 应用配置根路径
- `chat.conf.json` - 应用喜好配置
- `chat.model.json` - ChatGPT 输入提示,通过斜杠命令来快速完成输入,主要包含三部分:
- `user_custom` - 需要手动录入 (**Control Center -> Language Model -> User Custom**)
- `sync_prompts` - 从 [f/awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) 同步数据 (**Control Center -> Language Model -> Sync Prompts**)
- `sync_custom` - 同步自定义的 json 或 csv 文件数据,支持本地和远程 (**Control Center -> Language Model -> Sync Custom**)
- `chat.model.cmd.json` - 过滤(是否启用)和排序处理后的斜杠命令数据
- `[cache_model]` - 缓存同步或录入的数据
- `chatgpt_prompts.json` - 缓存 `sync_prompts` 数据
- `user_custom.json` - 缓存 `user_custom` 数据
- `ae6cf32a6f8541b499d6bfe549dbfca3.json` - 随机生成的文件名,缓存 `sync_custom` 数据
- `4f695d3cfbf8491e9b1f3fab6d85715c.json` - 随机生成的文件名,缓存 `sync_custom` 数据
- `bd1b96f15a1644f7bd647cc53073ff8f.json` - 随机生成的文件名,缓存 `sync_custom` 数据
### Sync Custom
目前同步自定义文件仅支持 json 和 csv,且需要满足以下格式,否则会导致应用异常
> JSON 格式
```json
[
{
"cmd": "a",
"act": "aa",
"prompt": "aaa aaa aaa"
},
{
"cmd": "b",
"act": "bb",
"prompt": "bbb bbb bbb"
}
]
```
> CSV 格式
```csv
"cmd","act","prompt"
"a","aa","aaa aaa aaa"
"b","bb","bbb bbb bbb"
```
## 👀 预览 ## 👀 预览
<img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/control-center.png" alt="control center"> <img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/control-center.png" alt="control center">
<img width="320" src="./assets/export.png" alt="export"> <img width="320" src="./assets/tray.png" alt="tray"> <img width="320" src="./assets/export.png" alt="export"> <img width="320" src="./assets/tray.png" alt="tray">
<img width="320" src="./assets/tray-login.png" alt="tray login"> <img width="320" src="./assets/auto-update.png" alt="auto update"> <img width="320" src="./assets/tray-login.png" alt="tray login"> <img width="320" src="./assets/auto-update.png" alt="auto update">
--- ---
<a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> <a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
## ❓常见问题 ## ❓ 常见问题
### 不能打开 ChatGPT ### 不能打开 ChatGPT

View File

@@ -11,6 +11,7 @@
[![ChatGPT downloads](https://img.shields.io/github/downloads/lencx/ChatGPT/total.svg?style=flat-square)](https://github.com/lencx/ChatGPT/releases) [![ChatGPT downloads](https://img.shields.io/github/downloads/lencx/ChatGPT/total.svg?style=flat-square)](https://github.com/lencx/ChatGPT/releases)
[![chat](https://img.shields.io/badge/chat-discord-blue?style=flat&logo=discord)](https://discord.gg/aPhCRf4zZr) [![chat](https://img.shields.io/badge/chat-discord-blue?style=flat&logo=discord)](https://discord.gg/aPhCRf4zZr)
[![lencx](https://img.shields.io/twitter/follow/lencx_.svg?style=social)](https://twitter.com/lencx_) [![lencx](https://img.shields.io/twitter/follow/lencx_.svg?style=social)](https://twitter.com/lencx_)
<!-- [![中文版 badge](https://img.shields.io/badge/%E4%B8%AD%E6%96%87-Traditional%20Chinese-blue)](./README-ZH.md) --> <!-- [![中文版 badge](https://img.shields.io/badge/%E4%B8%AD%E6%96%87-Traditional%20Chinese-blue)](./README-ZH.md) -->
[Awesome ChatGPT](./AWESOME.md) [Awesome ChatGPT](./AWESOME.md)
@@ -23,9 +24,9 @@
**Latest:** **Latest:**
- `Mac`: [ChatGPT_0.5.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64.dmg) - `Mac`: [ChatGPT_0.7.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64.dmg)
- `Linux`: [chat-gpt_0.5.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/chat-gpt_0.5.1_amd64.deb) - `Linux`: [chat-gpt_0.7.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/chat-gpt_0.7.4_amd64.deb)
- `Windows`: [ChatGPT_0.5.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.1/ChatGPT_0.5.1_x64_en-US.msi) - `Windows`: [ChatGPT_0.7.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.7.4/ChatGPT_0.7.4_x64_en-US.msi)
[Other version...](https://github.com/lencx/ChatGPT/releases) [Other version...](https://github.com/lencx/ChatGPT/releases)
@@ -35,18 +36,18 @@
Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_ Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
~~~ sh ```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine brew install --cask chatgpt --no-quarantine
~~~ ```
Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this: Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this:
~~~ rb ```rb
repo = "lencx/chatgpt" repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git" tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true } cask "chatgpt", args: { "no-quarantine": true }
~~~ ```
## 📢 Announcement ## 📢 Announcement
@@ -59,13 +60,12 @@ You can look at [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-p
![chat cmd](./assets/chat-cmd-1.png) ![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png) ![chat cmd](./assets/chat-cmd-2.png)
After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`). <!-- After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`). -->
The project maintains a list of common commands, or you can copy [chat.model.json](https://github.com/lencx/ChatGPT/blob/main/chat.model.json) directly to your local directory `~/.chatgpt/chat.model.json` In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if it contains multiple command prompts, it will only select the first one as the fill, you can keep typing until the first prompted command is the one you want, then press the spacebar. Or use the mouse to click on one of the multiple commands). When the fill is complete, you simply press the Enter key. Under the slash command, use the tab key to modify the contents of the `{q}` tag (only single changes are supported [#54](https://github.com/lencx/ChatGPT/issues/54)). Use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command.
In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if it contains multiple command prompts, it will only select the first one as the fill, you can keep typing until the first prompted command is the one you want, then press the spacebar. Or use the mouse to click on one of the multiple commands). When the fill is complete, you simply press the Enter key.
![chatgpt](assets/chatgpt.gif) ![chatgpt](assets/chatgpt.gif)
![chatgpt-cmd](assets/chatgpt-cmd.gif)
## ✨ Features ## ✨ Features
@@ -75,7 +75,8 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
- Common shortcut keys - Common shortcut keys
- System tray hover window - System tray hover window
- Powerful menu items - Powerful menu items
- Shortcut command typing chatgpt prompt - Support for slash commands and their configuration (can be configured manually or synchronized from a file [#55](https://github.com/lencx/ChatGPT/issues/55))
- Customize global shortcuts ([#108](https://github.com/lencx/ChatGPT/issues/108))
### MenuItem ### MenuItem
@@ -101,9 +102,60 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
- `Report Bug`: Report a bug or give feedback. - `Report Bug`: Report a bug or give feedback.
- `Toggle Developer Tools`: Developer debugging tools. - `Toggle Developer Tools`: Developer debugging tools.
## Application Configuration
| Platform | Path |
| -------- | ------------------------- |
| Linux | `/home/lencx/.chatgpt` |
| macOS | `/Users/lencx/.chatgpt` |
| Windows | `C:\Users\lencx\.chatgpt` |
- `[.chatgpt]` - application configuration root folder
- `chat.conf.json` - preferences configuration
- `chat.model.json` - prompts configuration, contains three parts:
- `user_custom` - Requires manual data entry (**Control Center -> Language Model -> User Custom**)
- `sync_prompts` - Synchronize data from [f/awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) (**Control Center -> Language Model -> Sync Prompts**)
- `sync_custom` - Synchronize custom JSON or CSV file data, supports local and remote files (**Control Center -> Language Model -> Sync Custom**)
- `chat.model.cmd.json` - filtered (enabled entries only) and sorted slash commands (see the record sketch after this list)
- `[cache_model]` - caching model data
- `chatgpt_prompts.json` - Cache `sync_prompts` data
- `user_custom.json` - Cache `user_custom` data
- `ae6cf32a6f8541b499d6bfe549dbfca3.json` - Randomly generated file names, cache `sync_custom` data
- `4f695d3cfbf8491e9b1f3fab6d85715c.json` - Randomly generated file names, cache `sync_custom` data
- `bd1b96f15a1644f7bd647cc53073ff8f.json` - Randomly generated file names, cache `sync_custom` data
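The cache files listed above and `chat.model.cmd.json` all hold arrays of the same record shape. Below is a minimal sketch of that shape, mirroring the `ModelRecord` struct introduced in the `src-tauri` changes further down this diff; the field names come from that code, while the sample value is purely illustrative.

```rust
use serde::{Deserialize, Serialize};

/// One slash-command entry, as cached under `~/.chatgpt/cache_model/*.json`.
/// Enabled entries (sorted by `cmd` length) are re-exported into
/// `chat.model.cmd.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ModelRecord {
    cmd: String,        // slash command, typed as `/cmd`
    act: String,        // human-readable name of the prompt
    prompt: String,     // text filled into the chat input area
    tags: Vec<String>,  // e.g. "chatgpt-prompts" or "user-sync"
    enable: bool,       // disabled records are filtered out of chat.model.cmd.json
}

fn main() -> serde_json::Result<()> {
    // Illustrative record only; real data comes from the sync sources above.
    let sample = r#"[{"cmd":"tg","act":"Travel Guide","prompt":"I want you to act as a travel guide.","tags":["chatgpt-prompts"],"enable":true}]"#;
    let records: Vec<ModelRecord> = serde_json::from_str(sample)?;
    println!("{} record(s), first cmd: /{}", records.len(), records[0].cmd);
    Ok(())
}
```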
### Sync Custom
Currently, only JSON and CSV are supported for synchronizing custom files, and they must match the following formats; otherwise the application may behave abnormally.
> JSON format:
```json
[
{
"cmd": "a",
"act": "aa",
"prompt": "aaa aaa aaa"
},
{
"cmd": "b",
"act": "bb",
"prompt": "bbb bbb bbb"
}
]
```
> CSV format
```csv
"cmd","act","prompt"
"a","aa","aaa aaa aaa"
"b","bb","bbb bbb bbb"
```
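For reference, here is a minimal sketch of how such a custom file could be parsed into prompt records. It loosely follows the `PromptRecord` / `parse_prompt` additions shown later in this diff and assumes the `csv`, `serde`, and `serde_json` crates already listed in `Cargo.toml`; the app's actual behavior may differ in details.

```rust
use serde::Deserialize;

// The three keys/columns required by Sync Custom.
#[derive(Debug, Deserialize)]
struct PromptRecord {
    cmd: Option<String>, // optional: a cmd can be derived from `act` when missing
    act: String,
    prompt: String,
}

// Parse a CSV payload such as the example above; rows without an `act` are dropped.
fn parse_csv(data: &str) -> Vec<PromptRecord> {
    let mut rdr = csv::Reader::from_reader(data.as_bytes());
    rdr.deserialize()
        .filter_map(|row| row.ok())
        .filter(|r: &PromptRecord| !r.act.is_empty())
        .collect()
}

// Parse a JSON payload (an array of objects with the same keys).
fn parse_json(data: &str) -> Vec<PromptRecord> {
    serde_json::from_str(data).unwrap_or_default()
}

fn main() {
    let csv_data = "\"cmd\",\"act\",\"prompt\"\n\"a\",\"aa\",\"aaa aaa aaa\"";
    let json_data = r#"[{"cmd":"b","act":"bb","prompt":"bbb bbb bbb"}]"#;
    println!("csv records: {}", parse_csv(csv_data).len());
    println!("json records: {}", parse_json(json_data).len());
}
```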
## TODO ## TODO
- Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20)) <!-- - Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20)) -->
- `Control Center` - Feature Enhancements - `Control Center` - Feature Enhancements
- ... - ...

View File

@@ -1,5 +1,59 @@
# UPDATE LOG # UPDATE LOG
## v0.7.4
fix:
- trying to resolve linux errors: `error while loading shared libraries`
- customize global shortcuts (`Menu -> Preferences -> Control Center -> General -> Global Shortcut`)
## v0.7.3
chore:
- optimize slash command style
- optimize tray menu icon and button icons
- global shortcuts to the chatgpt app (mac: `Command + Shift + O`, windows: `Ctrl + Shift + O`)
## v0.7.2
fix: some windows systems cannot start the application
## v0.7.1
fix:
- some windows systems cannot start the application
- windows and linux add an about menu (shows version information)
- the tray icon is indistinguishable from the background in dark mode on windows and linux
## v0.7.0
fix:
- mac m1 copy/paste does not work on some system versions
- optimize the save chat log button to a small icon; the tray window no longer provides a save chat log button (the buttons caused the input area to become larger and the content area to become smaller)
feat:
- use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command
<!-- - global shortcuts to the chatgpt app (mac: command+shift+o, windows: ctrl+shift+o) -->
## v0.6.10
fix: sync failure on windows
## v0.6.4
fix: path not allowed on the configured scope
feat:
- optimize the generated pdf file size
- menu added `Sync Prompts`
- `Control Center` added `Sync Custom`
- the slash command is triggered by the enter key
- under the slash command, use the tab key to modify the contents of the `{q}` tag (only single changes are supported, https://github.com/lencx/ChatGPT/issues/54)
## v0.6.0
fix:
- windows shows Chinese when upgrading
## v0.5.1 ## v0.5.1
some optimization some optimization

BIN assets/chatgpt-cmd.gif (new binary file, 5.0 MiB; content not shown)

View File

@@ -1,6 +1,6 @@
cask "chatgpt" do cask "chatgpt" do
version "0.1.7" version "0.6.10"
sha256 "1320b30a67e2506f9b45ffd2a48243d6141171c231dd698994ae5156a637eb3f" sha256 "e85062565f826d32219c53b184d6df9c89441d4231cdfff775c2de8c50ac9906"
url "https://github.com/lencx/ChatGPT/releases/download/v#{version}/ChatGPT_#{version}_x64.dmg" url "https://github.com/lencx/ChatGPT/releases/download/v#{version}/ChatGPT_#{version}_x64.dmg"
name "ChatGPT" name "ChatGPT"

View File

@@ -1,82 +0,0 @@
{
"name": "ChatGPT Model",
"link": "https://github.com/lencx/ChatGPT/blob/main/chat.model.md",
"data": [
{
"cmd": "cfm",
"act": "Character from Movie/Book/Anything",
"tags": [],
"enable": true,
"prompt": "I want you to act like {Character} from {series}. I want you to respond and answer like {Character}. do not write any explanations. only answer like {character}. You must know all of the knowledge of {character}. My first sentence is “Hi Character”"
},
{
"cmd": "pc",
"act": "Plagiarism Checker",
"tags": [],
"enable": true,
"prompt": "I want you to act as a plagiarism checker. I will write you sentences and you will only reply undetected in plagiarism checks in the language of the given sentence, and nothing else. Do not write explanations on replies. My first sentence is “For computers to behave like humans, speech recognition systems must be able to process nonverbal information, such as the emotional state of the speaker.”"
},
{
"cmd": "drunk",
"act": "Drunk",
"tags": [],
"enable": true,
"prompt": "Lets play a game. You will role play as if you were drunk. Make your answers incoherent."
},
{
"cmd": "tg",
"act": "Travel Guide",
"tags": [],
"enable": true,
"prompt": "I want you to act as a travel guide. I will write you my location and you will suggest a place to visit near my location. In some cases, I will also give you the type of places I will visit. You will also suggest me places of similar type that are close to my first location. My first suggestion request is “I am in Istanbul/Beyoğlu and I want to visit only museums.”"
},
{
"cmd": "eph",
"act": "English Pronunciation Helper",
"tags": [],
"enable": true,
"prompt": "I want you to act as an English pronunciation assistant for Turkish speaking people. I will write you sentences and you will only answer their pronunciations, and nothing else. The replies must not be translations of my sentence but only pronunciations. Pronunciations should use Turkish Latin letters for phonetics. Do not write explanations on replies. My first sentence is “how the weather is in Istanbul?”"
},
{
"cmd": "excel",
"act": "Excel Sheet",
"tags": [],
"enable": true,
"prompt": "I want you to act as a text based excel. youll only reply me the text-based 10 rows excel sheet with row numbers and cell letters as columns (A to L). First column header should be empty to reference row number. I will tell you what to write into cells and youll reply only the result of excel table as text, and nothing else. Do not write explanations. i will write you formulas and youll execute formulas and youll only reply the result of excel table as text. First, reply me the empty sheet."
},
{
"cmd": "console",
"act": "JavaScript Console",
"tags": [],
"enable": true,
"prompt": "I want you to act as a javascript console. I will type commands and you will reply with what the javascript console should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. do not write explanations. do not type commands unless I instruct you to do so. when i need to tell you something in english, i will do so by putting text inside curly brackets {like this}. my first command is console.log(“Hello World”);"
},
{
"cmd": "pi",
"act": "position Interviewer",
"tags": [],
"enable": true,
"prompt": "I want you to act as an interviewer. I will be the candidate and you will ask me the interview questions for the position position. I want you to only reply as the interviewer. Do not write all the conservation at once. I want you to only do the interview with me. Ask me the questions and wait for my answers. Do not write explanations. Ask me the questions one by one like an interviewer does and wait for my answers. My first sentence is “Hi”"
},
{
"cmd": "trans",
"act": "English Translator and Improver",
"tags": [
"tools",
"cx",
"x"
],
"enable": true,
"prompt": "I want you to act as an English translator, spelling corrector and improver. I will speak to you in any language and you will detect the language, translate it and answer in the corrected and improved version of my text, in English. I want you to replace my simplified A0-level words and sentences with more beautiful and elegant, upper level English words and sentences. Keep the meaning same, but make them more literary. I want you to only reply the correction, the improvements and nothing else, do not write explanations. My first sentence is \"istanbulu cok seviyom burada olmak cok guzel\""
},
{
"cmd": "terminal",
"act": "Linux Terminal",
"tags": [
"dev"
],
"enable": true,
"prompt": "i want you to act as a linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. do not write explanations. do not type commands unless I instruct you to do so. when i need to tell you something in english, i will do so by putting text inside curly brackets {like this}. my first command is pwd"
}
]
}

View File

@@ -8,6 +8,8 @@
"build": "yarn tauri build", "build": "yarn tauri build",
"updater": "tr updater", "updater": "tr updater",
"release": "tr release --git", "release": "tr release --git",
"fix:tray": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon-light.png\" --json.tauri_systemTray_iconAsTemplate=false",
"fix:tray:mac": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon.png\" --json.tauri_systemTray_iconAsTemplate=true",
"download": "node ./scripts/download.js", "download": "node ./scripts/download.js",
"tr": "tr", "tr": "tr",
"tauri": "tauri" "tauri": "tauri"
@@ -32,7 +34,7 @@
"dependencies": { "dependencies": {
"@ant-design/icons": "^4.8.0", "@ant-design/icons": "^4.8.0",
"@tauri-apps/api": "^1.2.0", "@tauri-apps/api": "^1.2.0",
"antd": "^5.0.6", "antd": "^5.1.0",
"dayjs": "^1.11.7", "dayjs": "^1.11.7",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"react": "^18.2.0", "react": "^18.2.0",

View File

@@ -17,11 +17,16 @@ tauri-build = {version = "1.2.1", features = [] }
anyhow = "1.0.66" anyhow = "1.0.66"
serde_json = "1.0" serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] } tauri = { version = "1.2.3", features = ["api-all", "devtools", "global-shortcut", "system-tray", "updater"] }
tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] } tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] }
log = "0.4.17" log = "0.4.17"
csv = "1.1.6" csv = "1.1.6"
thiserror = "1.0.38" thiserror = "1.0.38"
walkdir = "2.3.2"
regex = "1.7.0"
tokio = { version = "1.23.0", features = ["macros"] }
reqwest = "0.11.13"
wry = "0.23.4"
[dependencies.tauri-plugin-log] [dependencies.tauri-plugin-log]
git = "https://github.com/tauri-apps/tauri-plugin-log" git = "https://github.com/tauri-apps/tauri-plugin-log"
@@ -41,4 +46,4 @@ custom-protocol = [ "tauri/custom-protocol" ]
[profile.release] [profile.release]
strip = true strip = true
lto = true lto = true
opt-level = "z" opt-level = "s"

Binary file changed (92 KiB; content not shown)

View File

@@ -1,5 +1,9 @@
use crate::{conf::ChatConfJson, utils}; use crate::{
use std::{fs, path::PathBuf}; conf::{ChatConfJson, GITHUB_PROMPTS_CSV_URL},
utils::{self, exists},
};
use log::info;
use std::{collections::HashMap, fs, path::PathBuf};
use tauri::{api, command, AppHandle, Manager}; use tauri::{api, command, AppHandle, Manager};
#[command] #[command]
@@ -66,14 +70,15 @@ pub fn open_file(path: PathBuf) {
} }
#[command] #[command]
pub fn get_chat_model() -> serde_json::Value { pub fn get_chat_model_cmd() -> serde_json::Value {
let path = utils::chat_root().join("chat.model.json"); let path = utils::chat_root().join("chat.model.cmd.json");
let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string()); let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
serde_json::from_str(&content).unwrap() serde_json::from_str(&content).unwrap()
} }
#[derive(Debug, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PromptRecord { pub struct PromptRecord {
pub cmd: Option<String>,
pub act: String, pub act: String,
pub prompt: String, pub prompt: String,
} }
@@ -83,8 +88,189 @@ pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
let mut rdr = csv::Reader::from_reader(data.as_bytes()); let mut rdr = csv::Reader::from_reader(data.as_bytes());
let mut list = vec![]; let mut list = vec![];
for result in rdr.deserialize() { for result in rdr.deserialize() {
let record: PromptRecord = result.unwrap(); let record: PromptRecord = result.unwrap_or_else(|err| {
list.push(record); info!("parse_prompt_error: {}", err);
PromptRecord {
cmd: None,
act: "".to_string(),
prompt: "".to_string(),
}
});
if !record.act.is_empty() {
list.push(record);
}
} }
list list
} }
#[command]
pub fn window_reload(app: AppHandle, label: &str) {
app.app_handle()
.get_window(label)
.unwrap()
.eval("window.location.reload()")
.unwrap();
}
use utils::chat_root;
use walkdir::WalkDir;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct ModelRecord {
pub cmd: String,
pub act: String,
pub prompt: String,
pub tags: Vec<String>,
pub enable: bool,
}
#[command]
pub fn cmd_list() -> Vec<ModelRecord> {
let mut list = vec![];
for entry in WalkDir::new(chat_root().join("cache_model"))
.into_iter()
.filter_map(|e| e.ok())
{
let file = fs::read_to_string(entry.path().display().to_string());
if let Ok(v) = file {
let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
let enable_list = data.into_iter().filter(|v| v.enable);
list.extend(enable_list)
}
}
// dbg!(&list);
list.sort_by(|a, b| a.cmd.len().cmp(&b.cmd.len()));
list
}
#[command]
pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(GITHUB_PROMPTS_CSV_URL, Some(&app))
.await
.unwrap();
if let Some(v) = res {
let data = parse_prompt(v)
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["chatgpt-prompts".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
let data2 = data.clone();
let model = chat_root().join("chat.model.json");
let model_cmd = chat_root().join("chat.model.cmd.json");
let chatgpt_prompts = chat_root().join("cache_model").join("chatgpt_prompts.json");
if !exists(&model) {
fs::write(
&model,
serde_json::json!({
"name": "ChatGPT Model",
"link": "https://github.com/lencx/ChatGPT"
})
.to_string(),
)
.unwrap();
}
// chatgpt_prompts.json
fs::write(
chatgpt_prompts,
serde_json::to_string_pretty(&data).unwrap(),
)
.unwrap();
let cmd_data = cmd_list();
// chat.model.cmd.json
fs::write(
model_cmd,
serde_json::to_string_pretty(&serde_json::json!({
"name": "ChatGPT CMD",
"last_updated": time,
"data": cmd_data,
}))
.unwrap(),
)
.unwrap();
let mut kv = HashMap::new();
kv.insert(
"sync_prompts".to_string(),
serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
);
let model_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
&kv,
);
// chat.model.json
fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
// refresh window
api::dialog::message(
app.get_window("core").as_ref(),
"Sync Prompts",
"ChatGPT Prompts data has been synchronized!",
);
window_reload(app, "core");
return Some(data2);
}
None
}
#[command]
pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(&url, None).await.unwrap_or_else(|err| {
info!("chatgpt_http_error: {}", err);
None
});
info!("chatgpt_http_url: {}", url);
if let Some(v) = res {
let data;
if data_type == "csv" {
info!("chatgpt_http_csv_parse");
data = parse_prompt(v);
} else if data_type == "json" {
info!("chatgpt_http_json_parse");
data = serde_json::from_str(&v).unwrap_or_else(|err| {
info!("chatgpt_http_json_parse_error: {}", err);
vec![]
});
} else {
info!("chatgpt_http_unknown_type");
data = vec![];
}
let data = data
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["user-sync".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
return Some(data);
}
None
}

View File

@@ -3,30 +3,39 @@ use crate::{
utils, utils,
}; };
use tauri::{ use tauri::{
AboutMetadata, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray, SystemTrayEvent,
SystemTrayEvent, SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent, SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
}; };
use tauri_plugin_positioner::{on_tray_event, Position, WindowExt}; use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
#[cfg(target_os = "macos")]
use tauri::AboutMetadata;
use super::window;
// --- Menu // --- Menu
pub fn init() -> Menu { pub fn init() -> Menu {
let chat_conf = ChatConfJson::get_chat_conf(); let chat_conf = ChatConfJson::get_chat_conf();
let name = "ChatGPT"; let name = "ChatGPT";
let app_menu = Submenu::new( let app_menu = Submenu::new(
name, name,
Menu::new() Menu::with_items([
.add_native_item(MenuItem::About(name.into(), AboutMetadata::default())) #[cfg(target_os = "macos")]
.add_native_item(MenuItem::Services) MenuItem::About(name.into(), AboutMetadata::default()).into(),
.add_native_item(MenuItem::Separator) #[cfg(not(target_os = "macos"))]
.add_native_item(MenuItem::Hide) CustomMenuItem::new("about".to_string(), "About ChatGPT").into(),
.add_native_item(MenuItem::HideOthers) MenuItem::Services.into(),
.add_native_item(MenuItem::ShowAll) MenuItem::Hide.into(),
.add_native_item(MenuItem::Separator) MenuItem::HideOthers.into(),
.add_native_item(MenuItem::Quit), MenuItem::ShowAll.into(),
MenuItem::Separator.into(),
MenuItem::Quit.into(),
]),
); );
let stay_on_top = let stay_on_top =
CustomMenuItem::new("stay_on_top".to_string(), "Stay On Top").accelerator("CmdOrCtrl+T"); CustomMenuItem::new("stay_on_top".to_string(), "Stay On Top").accelerator("CmdOrCtrl+T");
#[cfg(target_os = "macos")]
let titlebar = let titlebar =
CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B"); CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B");
let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light"); let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light");
@@ -38,6 +47,7 @@ pub fn init() -> Menu {
} else { } else {
stay_on_top stay_on_top
}; };
#[cfg(target_os = "macos")]
let titlebar_menu = if chat_conf.titlebar { let titlebar_menu = if chat_conf.titlebar {
titlebar.selected() titlebar.selected()
} else { } else {
@@ -47,6 +57,10 @@ pub fn init() -> Menu {
let preferences_menu = Submenu::new( let preferences_menu = Submenu::new(
"Preferences", "Preferences",
Menu::with_items([ Menu::with_items([
CustomMenuItem::new("control_center".to_string(), "Control Center")
.accelerator("CmdOrCtrl+Shift+P")
.into(),
MenuItem::Separator.into(),
Submenu::new( Submenu::new(
"Theme", "Theme",
Menu::new() Menu::new()
@@ -67,13 +81,11 @@ pub fn init() -> Menu {
titlebar_menu.into(), titlebar_menu.into(),
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
CustomMenuItem::new("hide_dock_icon".to_string(), "Hide Dock Icon").into(), CustomMenuItem::new("hide_dock_icon".to_string(), "Hide Dock Icon").into(),
MenuItem::Separator.into(),
CustomMenuItem::new("inject_script".to_string(), "Inject Script") CustomMenuItem::new("inject_script".to_string(), "Inject Script")
.accelerator("CmdOrCtrl+J") .accelerator("CmdOrCtrl+J")
.into(), .into(),
CustomMenuItem::new("control_center".to_string(), "Control Center") MenuItem::Separator.into(),
.accelerator("CmdOrCtrl+Shift+P") CustomMenuItem::new("sync_prompts".to_string(), "Sync Prompts").into(),
.into(),
MenuItem::Separator.into(), MenuItem::Separator.into(),
CustomMenuItem::new("go_conf".to_string(), "Go to Config") CustomMenuItem::new("go_conf".to_string(), "Go to Config")
.accelerator("CmdOrCtrl+Shift+G") .accelerator("CmdOrCtrl+Shift+G")
@@ -171,13 +183,37 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
let menu_handle = core_window.menu_handle(); let menu_handle = core_window.menu_handle();
match menu_id { match menu_id {
// App
"about" => {
let tauri_conf = utils::get_tauri_conf().unwrap();
tauri::api::dialog::message(
app.get_window("core").as_ref(),
"ChatGPT",
format!("Version {}", tauri_conf.package.version.unwrap()),
);
}
// Preferences // Preferences
"control_center" => app.get_window("main").unwrap().show().unwrap(), "control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&app.env()), "restart" => tauri::api::process::restart(&app.env()),
"inject_script" => open(&app, script_path), "inject_script" => open(&app, script_path),
"go_conf" => utils::open_file(utils::chat_root()), "go_conf" => utils::open_file(utils::chat_root()),
"clear_conf" => utils::clear_conf(&app), "clear_conf" => utils::clear_conf(&app),
"awesome" => open(&app, conf::AWESOME_URL.to_string()), "awesome" => open(&app, conf::AWESOME_URL.to_string()),
"sync_prompts" => {
tauri::api::dialog::ask(
app.get_window("core").as_ref(),
"Sync Prompts",
"Data sync will enable all prompts, are you sure you want to sync?",
move |is_restart| {
if is_restart {
app.get_window("core")
.unwrap()
.eval("window.__sync_prompts && window.__sync_prompts()")
.unwrap()
}
},
);
}
"hide_dock_icon" => { "hide_dock_icon" => {
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": true }), Some(app)).unwrap() ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": true }), Some(app)).unwrap()
} }
@@ -243,20 +279,32 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
// --- SystemTray Menu // --- SystemTray Menu
pub fn tray_menu() -> SystemTray { pub fn tray_menu() -> SystemTray {
if cfg!(target_os = "macos") {
return SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
);
}
SystemTray::new().with_menu( SystemTray::new().with_menu(
SystemTrayMenu::new() SystemTrayMenu::new()
.add_item(CustomMenuItem::new( .add_item(CustomMenuItem::new(
"control_center".to_string(), "control_center".to_string(),
"Control Center", "Control Center",
)) ))
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_native_item(SystemTrayMenuItem::Separator) .add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")), .add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
) )
@@ -287,7 +335,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
} }
} }
SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() { SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
"control_center" => app.get_window("main").unwrap().show().unwrap(), "control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&handle.env()), "restart" => tauri::api::process::restart(&handle.env()),
"show_dock_icon" => { "show_dock_icon" => {
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app)) ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))

View File

@@ -1,22 +1,55 @@
use crate::{app::window, conf::ChatConfJson, utils}; use crate::{app::window, conf::ChatConfJson, utils};
use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, Manager}; use log::info;
use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, GlobalShortcutManager, Manager};
use wry::application::accelerator::Accelerator;
pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> { pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> {
info!("stepup");
let chat_conf = ChatConfJson::get_chat_conf(); let chat_conf = ChatConfJson::get_chat_conf();
let url = chat_conf.origin.to_string(); let url = chat_conf.origin.to_string();
let theme = ChatConfJson::theme(); let theme = ChatConfJson::theme();
let handle = app.app_handle(); let handle = app.app_handle();
std::thread::spawn(move || { tokio::spawn(async move {
window::tray_window(&handle); window::tray_window(&handle);
}); });
if let Some(v) = chat_conf.global_shortcut {
info!("global_shortcut: `{}`", v);
match v.parse::<Accelerator>() {
Ok(_) => {
info!("global_shortcut_register");
let handle = app.app_handle();
let mut shortcut = app.global_shortcut_manager();
shortcut
.register(&v, move || {
if let Some(w) = handle.get_window("core") {
if w.is_visible().unwrap() {
w.hide().unwrap();
} else {
w.show().unwrap();
w.set_focus().unwrap();
}
}
})
.unwrap_or_else(|err| {
info!("global_shortcut_register_error: {}", err);
});
}
Err(err) => {
info!("global_shortcut_parse_error: {}", err);
}
}
} else {
info!("global_shortcut_unregister");
};
if chat_conf.hide_dock_icon { if chat_conf.hide_dock_icon {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
app.set_activation_policy(tauri::ActivationPolicy::Accessory); app.set_activation_policy(tauri::ActivationPolicy::Accessory);
} else { } else {
let app = app.handle(); let app = app.handle();
std::thread::spawn(move || { tokio::spawn(async move {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
WindowBuilder::new(&app, "core", WindowUrl::App(url.into())) WindowBuilder::new(&app, "core", WindowUrl::App(url.into()))
.title("ChatGPT") .title("ChatGPT")

View File

@@ -6,7 +6,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
let theme = conf::ChatConfJson::theme(); let theme = conf::ChatConfJson::theme();
let app = handle.clone(); let app = handle.clone();
std::thread::spawn(move || { tokio::spawn(async move {
WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into())) WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into()))
.title("ChatGPT") .title("ChatGPT")
.resizable(false) .resizable(false)
@@ -16,10 +16,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.always_on_top(true) .always_on_top(true)
.theme(theme) .theme(theme)
.initialization_script(&utils::user_script()) .initialization_script(&utils::user_script())
.initialization_script(include_str!("../assets/html2canvas.js"))
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../assets/core.js")) .initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js")) .initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_tray) .user_agent(&chat_conf.ua_tray)
.build() .build()
@@ -28,3 +25,17 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.unwrap(); .unwrap();
}); });
} }
pub fn control_window(handle: &tauri::AppHandle) {
let app = handle.clone();
tokio::spawn(async move {
WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
.title("Control Center")
.resizable(true)
.fullscreen(false)
.inner_size(800.0, 600.0)
.min_inner_size(800.0, 600.0)
.build()
.unwrap();
});
}

View File

@@ -13,13 +13,30 @@ function init() {
z-index: 9999; z-index: 9999;
} }
.chat-model-cmd-list>div { .chat-model-cmd-list>div {
border: solid 2px #d8d8d8; border: solid 2px rgba(80,80,80,.3);
border-radius: 5px; border-radius: 5px;
background-color: #fff; background-color: #fff;
} }
html.dark .chat-model-cmd-list>div {
background-color: #4a4a4a;
}
html.dark .chat-model-cmd-list .cmd-item {
border-color: #666;
}
html.dark .chat-model-cmd-list .cmd-item b {
color: #e8e8e8;
}
html.dark .chat-model-cmd-list .cmd-item i {
color: #999;
}
html.dark .chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.5);
}
.chat-model-cmd-list .cmd-item { .chat-model-cmd-list .cmd-item {
font-size: 12px; font-size: 12px;
border-bottom: solid 1px #888; border-bottom: solid 1px rgba(80,80,80,.2);
padding: 2px 4px; padding: 2px 4px;
display: flex; display: flex;
user-select: none; user-select: none;
@@ -28,6 +45,9 @@ function init() {
.chat-model-cmd-list .cmd-item:last-child { .chat-model-cmd-list .cmd-item:last-child {
border-bottom: none; border-bottom: none;
} }
.chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.3);
}
.chat-model-cmd-list .cmd-item b { .chat-model-cmd-list .cmd-item b {
display: inline-block; display: inline-block;
width: 100px; width: 100px;
@@ -46,7 +66,16 @@ function init() {
white-space: nowrap; white-space: nowrap;
text-align: right; text-align: right;
color: #888; color: #888;
}`; }
.chatappico {
width: 20px;
height: 20px;
}
.chatappico.pdf {
width: 24px;
height: 24px;
}
`;
document.head.append(styleDom); document.head.append(styleDom);
if (window.formInterval) { if (window.formInterval) {
@@ -61,10 +90,8 @@ function init() {
} }
async function cmdTip() { async function cmdTip() {
const chatModelJson = await invoke('get_chat_model') || {}; const chatModelJson = await invoke('get_chat_model_cmd') || {};
const user_custom = chatModelJson.user_custom || []; const data = chatModelJson.data;
const sys_sync_prompts = chatModelJson.sys_sync_prompts || [];
const data = [...user_custom, ...sys_sync_prompts];
if (data.length <= 0) return; if (data.length <= 0) return;
const modelDom = document.createElement('div'); const modelDom = document.createElement('div');
@@ -72,32 +99,120 @@ async function cmdTip() {
// fix: tray window // fix: tray window
if (__TAURI_METADATA__.__currentWindow.label === 'tray') { if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
modelDom.style.bottom = '40px'; modelDom.style.bottom = '54px';
} }
document.querySelector('form').appendChild(modelDom); document.querySelector('form').appendChild(modelDom);
const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`; const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-cmd="${v.cmd}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
const renderList = (v) => {
modelDom.innerHTML = `<div>${v.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = v[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = v[0]?.cmd.trim();
window.__list = modelDom.querySelectorAll('.cmd-item');
window.__index = 0;
window.__list[window.__index].classList.add('selected');
};
const setPrompt = (v = '') => {
if (v.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${v.trim()}}`);
}
}
const searchInput = document.querySelector('form textarea'); const searchInput = document.querySelector('form textarea');
// Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`. // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.
// If more than one command appears in the search results, the first one will be used by default. // If more than one command appears in the search results, the first one will be used by default.
searchInput.addEventListener('keydown', (event) => { searchInput.addEventListener('keydown', (event) => {
if (!window.__CHAT_MODEL_CMD__) { if (!window.__CHAT_MODEL_CMD_PROMPT__) {
return; return;
} }
if (event.keyCode === 32) { // ------------------ Keyboard scrolling (ArrowUp | ArrowDown) --------------------------
searchInput.value = window.__CHAT_MODEL_CMD__; if (event.keyCode === 38 && window.__index > 0) { // ArrowUp
modelDom.innerHTML = ''; window.__list[window.__index].classList.remove('selected');
delete window.__CHAT_MODEL_CMD__; window.__index = window.__index - 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
} }
if (event.keyCode === 13) {
if (event.keyCode === 40 && window.__index < window.__list.length - 1) { // ArrowDown
window.__list[window.__index].classList.remove('selected');
window.__index = window.__index + 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
}
const containerHeight = modelDom.offsetHeight;
const itemHeight = window.__list[0].offsetHeight + 1;
const itemTop = window.__list[window.__index].offsetTop;
const itemBottom = itemTop + itemHeight;
if (itemTop < modelDom.scrollTop || itemBottom > modelDom.scrollTop + containerHeight) {
modelDom.scrollTop = itemTop;
}
// ------------------ TAB key replaces `{q}` tag content -------------------------------
// feat: https://github.com/lencx/ChatGPT/issues/54
if (event.keyCode === 9 && !window.__CHAT_MODEL_STATUS__) {
const strGroup = window.__CHAT_MODEL_CMD_PROMPT__.match(/\{([^{}]*)\}/) || [];
if (strGroup[1]) {
searchInput.value = `/${window.__CHAT_MODEL_CMD__}` + ` {${strGroup[1]}}` + ' |-> ';
window.__CHAT_MODEL_STATUS__ = 1;
}
event.preventDefault();
}
if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) { // TAB
const data = searchInput.value.split('|->');
if (data[1]?.trim()) {
setPrompt(data[1]);
window.__CHAT_MODEL_STATUS__ = 2;
}
event.preventDefault();
}
// input text
if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) { // TAB
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = ''; modelDom.innerHTML = '';
delete window.__CHAT_MODEL_STATUS__;
event.preventDefault();
}
// ------------------ type in a space to complete the fill ------------------------------------
if (event.keyCode === 32) {
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;
}
// ------------------ send --------------------------------------------------------------------
if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) { // Enter
const data = searchInput.value.split('|->');
setPrompt(data[1]);
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;
delete window.__CHAT_MODEL_CMD__;
delete window.__CHAT_MODEL_STATUS__;
event.preventDefault();
}
});
searchInput.addEventListener('input', () => {
if (searchInput.value === '') {
delete window.__CHAT_MODEL_CMD_PROMPT__;
delete window.__CHAT_MODEL_CMD__;
delete window.__CHAT_MODEL_STATUS__;
}
if (window.__CHAT_MODEL_STATUS__) return;
const query = searchInput.value;
if (!query || !/^\//.test(query)) {
modelDom.innerHTML = '';
@@ -106,19 +221,18 @@ async function cmdTip() {
// all cmd result
if (query === '/') {
renderList(data);
return;
}
const result = data.filter(i => new RegExp(query.substring(1)).test(i.cmd));
if (result.length > 0) {
renderList(result);
} else {
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;
delete window.__CHAT_MODEL_CMD__;
delete window.__CHAT_MODEL_STATUS__;
}
}, {
capture: false,
@@ -140,7 +254,7 @@ async function cmdTip() {
const val = decodeURIComponent(item.getAttribute('data-prompt'));
searchInput.value = val;
document.querySelector('form textarea').focus();
window.__CHAT_MODEL_CMD_PROMPT__ = val;
modelDom.innerHTML = '';
}
}, {
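In isolation, the `{q}` replacement that the TAB flow above drives through `setPrompt` boils down to one regex substitution; a minimal standalone sketch (hypothetical helper name, not part of the injected script):

// Sketch of the `{q}`-style placeholder fill; `fillPrompt` is an illustrative name.
function fillPrompt(promptTemplate: string, userInput: string): string {
  // Replace the first {...} placeholder with the user's text, keeping the braces,
  // e.g. "Translate {q} into French" + "hello" -> "Translate {hello} into French".
  if (!userInput.trim()) return promptTemplate;
  return promptTemplate.replace(/\{([^{}]*)\}/, `{${userInput.trim()}}`);
}
// Mirrors typing `/translate {q} |-> hello` and pressing TAB.
console.log(fillPrompt('Translate {q} into French', 'hello'));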

View File

@@ -86,6 +86,10 @@ async function init() {
}
}
});
window.__sync_prompts = async function() {
await invoke('sync_prompts', { time: Date.now() });
}
}
if (

View File

@@ -1,7 +1,9 @@
// *** Core Script - Export ***
// @ref: https://github.com/liady/ChatGPT-pdf
const buttonOuterHTMLFallback = `<button class="btn flex justify-center gap-2 btn-neutral" id="download-png-button">Try Again</button>`;
async function init() {
if (window.innerWidth < 767) return;
const chatConf = await invoke('get_chat_conf') || {};
if (window.buttonsInterval) {
clearInterval(window.buttonsInterval);
@@ -11,14 +13,15 @@ async function init() {
if (!actionsArea) {
return;
}
if (shouldAddButtons(actionsArea)) {
let TryAgainButton = actionsArea.querySelector("button");
if (!TryAgainButton) {
const parentNode = document.createElement("div");
parentNode.innerHTML = buttonOuterHTMLFallback;
TryAgainButton = parentNode.querySelector("button");
}
addActionsButtons(actionsArea, TryAgainButton, chatConf);
} else if (shouldRemoveButtons()) {
removeButtons();
}
}, 200);
@@ -29,32 +32,42 @@ const Format = {
PDF: "pdf", PDF: "pdf",
}; };
function shouldRemoveButtons() {
const isOpenScreen = document.querySelector("h1.text-4xl");
if(isOpenScreen){
return true;
}
const inConversation = document.querySelector("form button>div");
if(inConversation){
return true;
}
return false;
}
function shouldAddButtons(actionsArea) {
// first, check if there's a "Try Again" button and no other buttons
const buttons = actionsArea.querySelectorAll("button");
const hasTryAgainButton = Array.from(buttons).some((button) => {
return !button.id?.includes("download");
});
if (hasTryAgainButton && buttons.length === 1) {
return true;
}
// otherwise, check if open screen is not visible
const isOpenScreen = document.querySelector("h1.text-4xl");
if (isOpenScreen) {
return false;
}
// check if the conversation is finished and there are no share buttons
const finishedConversation = document.querySelector("form button>svg");
const hasShareButtons = actionsArea.querySelectorAll("button[share-ext]");
if (finishedConversation && !hasShareButtons.length) {
return true;
}
return false;
}
function removeButtons() {
@@ -72,6 +85,39 @@ function removeButtons() {
}
}
function addActionsButtons(actionsArea, TryAgainButton) {
const downloadButton = TryAgainButton.cloneNode(true);
downloadButton.id = "download-png-button";
downloadButton.setAttribute("share-ext", "true");
// downloadButton.innerText = "Generate PNG";
downloadButton.title = "Generate PNG";
downloadButton.innerHTML = setIcon('png');
downloadButton.onclick = () => {
downloadThread();
};
actionsArea.appendChild(downloadButton);
const downloadPdfButton = TryAgainButton.cloneNode(true);
downloadPdfButton.id = "download-pdf-button";
downloadButton.setAttribute("share-ext", "true");
// downloadPdfButton.innerText = "Download PDF";
downloadPdfButton.title = "Download PDF";
downloadPdfButton.innerHTML = setIcon('pdf');
downloadPdfButton.onclick = () => {
downloadThread({ as: Format.PDF });
};
actionsArea.appendChild(downloadPdfButton);
const exportHtml = TryAgainButton.cloneNode(true);
exportHtml.id = "download-html-button";
downloadButton.setAttribute("share-ext", "true");
// exportHtml.innerText = "Share Link";
exportHtml.title = "Share Link";
exportHtml.innerHTML = setIcon('link');
exportHtml.onclick = () => {
sendRequest();
};
actionsArea.appendChild(exportHtml);
}
function downloadThread({ as = Format.PNG } = {}) {
const elements = new Elements();
elements.fixLocation();
@@ -113,7 +159,7 @@ function handlePdf(imgData, canvas, pixelRatio) {
]);
var pdfWidth = pdf.internal.pageSize.getWidth();
var pdfHeight = pdf.internal.pageSize.getHeight();
pdf.addImage(imgData, "PNG", 0, 0, pdfWidth, pdfHeight, '', 'FAST');
const data = pdf.__private__.getArrayBuffer(pdf.__private__.buildDocument());
invoke('download', { name: `chatgpt-${Date.now()}.pdf`, blob: Array.from(new Uint8Array(data)) });
@@ -230,4 +276,12 @@ if (
init();
} else {
document.addEventListener("DOMContentLoaded", init);
}
function setIcon(type) {
return {
link: `<svg class="chatappico" viewBox="0 0 1024 1024"><path d="M1007.382 379.672L655.374 75.702C624.562 49.092 576 70.694 576 112.03v160.106C254.742 275.814 0 340.2 0 644.652c0 122.882 79.162 244.618 166.666 308.264 27.306 19.862 66.222-5.066 56.154-37.262C132.132 625.628 265.834 548.632 576 544.17V720c0 41.4 48.6 62.906 79.374 36.328l352.008-304c22.142-19.124 22.172-53.506 0-72.656z" p-id="8506" fill="currentColor"></path></svg>`,
png: `<svg class="chatappico" viewBox="0 0 1070 1024"><path d="M981.783273 0H85.224727C38.353455 0 0 35.374545 0 83.083636v844.893091c0 47.616 38.353455 86.574545 85.178182 86.574546h903.633454c46.917818 0 81.733818-38.958545 81.733819-86.574546V83.083636C1070.592 35.374545 1028.701091 0 981.783273 0zM335.825455 135.912727c74.193455 0 134.330182 60.974545 134.330181 136.285091 0 75.170909-60.136727 136.192-134.330181 136.192-74.286545 0-134.516364-61.021091-134.516364-136.192 0-75.264 60.229818-136.285091 134.516364-136.285091z m-161.512728 745.937455a41.890909 41.890909 0 0 1-27.648-10.379637 43.752727 43.752727 0 0 1-4.654545-61.067636l198.097454-255.162182a42.123636 42.123636 0 0 1 57.716364-6.702545l116.549818 128.139636 286.906182-352.814545c14.615273-18.711273 90.251636-106.775273 135.866182-6.935273 0.093091-0.093091 0.093091 112.965818 0.232727 247.761455 0.093091 140.8 0.093091 317.067636 0.093091 317.067636-1.024-0.093091-762.740364 0.093091-763.112727 0.093091z" fill="currentColor"></path></svg>`,
pdf: `<svg class="chatappico pdf" viewBox="0 0 1024 1024"><path d="M821.457602 118.382249H205.725895c-48.378584 0-87.959995 39.583368-87.959996 87.963909v615.731707c0 48.378584 39.581411 87.959995 87.959996 87.959996h615.733664c48.380541 0 87.961952-39.581411 87.961952-87.959996V206.346158c-0.001957-48.378584-39.583368-87.963909-87.963909-87.963909zM493.962468 457.544987c-10.112054 32.545237-21.72487 82.872662-38.806571 124.248336-8.806957 22.378397-8.380404 18.480717-15.001764 32.609808l5.71738-1.851007c58.760658-16.443827 99.901532-20.519564 138.162194-27.561607-7.67796-6.06371-14.350194-10.751884-19.631237-15.586807-26.287817-29.101504-35.464584-34.570387-70.440002-111.862636v0.003913z m288.36767 186.413594c-7.476424 8.356924-20.670227 13.191847-40.019704 13.191847-33.427694 0-63.808858-9.229597-107.79277-31.660824-75.648648 8.356924-156.097 17.214754-201.399704 31.729308-2.199293 0.876587-4.832967 1.759043-7.916674 3.077836-54.536215 93.237125-95.031389 132.767663-130.621199 131.19646-11.286054-0.49895-27.694661-7.044-32.973748-10.11988l-6.52157-6.196764-2.29517-4.353583c-3.07588-7.91863-3.954423-15.395054-2.197337-23.751977 4.838837-23.309771 29.907651-60.251638 82.686779-93.237126 8.356924-6.159587 27.430511-15.897917 45.020944-24.25484 13.311204-21.177004 19.45905-34.744531 36.341171-72.259702 19.102937-45.324228 36.505531-99.492589 47.500041-138.191543v-0.44025c-16.267727-53.219378-25.945401-89.310095-9.67376-147.80856 3.958337-16.71189 18.46702-33.864031 34.748444-33.864031h10.552304c10.115967 0 19.791684 3.520043 26.829814 10.552304 29.029107 29.031064 15.39114 103.824649 0.8805 162.323113-0.8805 2.63563-1.322707 4.832967-1.761 6.153717 17.59239 49.697378 45.400538 98.774492 73.108895 121.647926 11.436717 8.791304 22.638634 18.899444 36.71098 26.814161 19.791684-2.20125 37.517128-4.11487 55.547812-4.11487 54.540128 0 87.525615 9.67963 100.279169 30.351814 4.400543 7.034217 6.595923 15.389184 5.281043 24.1844-0.44025 10.996467-4.39663 21.112434-12.31526 29.031064z m-27.796407-36.748157c-4.394673-4.398587-17.024957-16.936907-78.601259-16.936907-3.073923 0-10.622744-0.784623-14.57521 3.612007 32.104987 14.072347 62.830525 24.757704 83.058545 24.757703 3.083707 0 5.72325-0.442207 8.356923-0.876586h1.759044c2.20125-0.8805 3.520043-1.324663 3.960293-5.71738-0.87463-1.324663-1.757087-3.083707-3.958336-4.838837z m-387.124553 63.041845c-9.237424 5.27713-16.71189 10.112054-21.112433 13.634053-31.226444 28.586901-51.018128 57.616008-53.217422 74.331812 19.789727-6.59788 45.737084-35.626987 74.329855-87.961952v-0.003913z m125.574957-297.822284l2.197336-1.761c3.079793-14.072347 5.232127-29.189554 7.87167-38.869184l1.318794-7.036174c4.39663-25.070771 2.71781-39.720334-4.76057-50.272637l-6.59788-2.20125a57.381208 57.381208 0 0 0-3.079794 5.27713c-7.474467 18.47289-7.063567 55.283661 3.0524 94.865072l-0.001956-0.001957z" fill="currentColor"></path></svg>`
}[type];
}

View File

@@ -1,5 +1,6 @@
use crate::utils::{chat_root, create_file, exists};
use anyhow::Result;
use log::info;
use serde_json::Value;
use std::{collections::BTreeMap, fs, path::PathBuf, sync::Mutex};
use tauri::{Manager, Theme};
@@ -7,16 +8,19 @@ use tauri::{Manager, Theme};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
// pub const USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.1 Safari/605.1.15";
// pub const PHONE_USER_AGENT: &str = "Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.0 Mobile/15E148 Safari/604.1";
pub const ISSUES_URL: &str = "https://github.com/lencx/ChatGPT/issues";
pub const UPDATE_LOG_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md";
pub const AWESOME_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/AWESOME.md";
pub const GITHUB_PROMPTS_CSV_URL: &str =
"https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv";
pub const DEFAULT_CHAT_CONF: &str = r#"{
"stay_on_top": false,
"theme": "Light",
"titlebar": true,
"global_shortcut": "",
"hide_dock_icon": false,
"default_origin": "https://chat.openai.com",
"origin": "https://chat.openai.com",
@@ -27,6 +31,7 @@ pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{
"stay_on_top": false, "stay_on_top": false,
"theme": "Light", "theme": "Light",
"titlebar": false, "titlebar": false,
"global_shortcut": "",
"hide_dock_icon": false, "hide_dock_icon": false,
"default_origin": "https://chat.openai.com", "default_origin": "https://chat.openai.com",
"origin": "https://chat.openai.com", "origin": "https://chat.openai.com",
@@ -60,12 +65,14 @@ pub struct ChatConfJson {
pub origin: String,
pub ua_window: String,
pub ua_tray: String,
pub global_shortcut: Option<String>,
}
impl ChatConfJson {
/// init chat.conf.json
/// path: ~/.chatgpt/chat.conf.json
pub fn init() -> PathBuf {
info!("chat_conf_init");
let conf_file = ChatConfJson::conf_path();
let content = if cfg!(target_os = "macos") {
DEFAULT_CHAT_CONF_MAC

View File

@@ -15,8 +15,11 @@ use tauri_plugin_log::{
LogTarget, LoggerBuilder,
};
#[tokio::main]
async fn main() {
ChatConfJson::init();
// If the file does not exist, creating the file will block menu synchronization
utils::create_chatgpt_prompts();
let chat_conf = ChatConfJson::get_chat_conf();
let context = tauri::generate_context!();
let colors = ColoredLevelConfig {
@@ -57,8 +60,12 @@ fn main() {
cmd::form_confirm,
cmd::form_msg,
cmd::open_file,
cmd::get_chat_model_cmd,
cmd::parse_prompt,
cmd::sync_prompts,
cmd::sync_user_prompts,
cmd::window_reload,
cmd::cmd_list,
fs_extra::metadata,
])
.setup(setup::init)
@@ -72,7 +79,7 @@ fn main() {
if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() {
let win = event.window();
if win.label() == "main" {
win.close().unwrap();
} else {
// TODO: https://github.com/tauri-apps/tauri/issues/3084
// event.window().hide().unwrap();
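For orientation, the commands newly registered in the invoke_handler above are called from the webview; a rough sketch based on the call sites that appear elsewhere in this diff (argument shapes taken from those calls, not from the Rust signatures):

import { invoke } from '@tauri-apps/api';

// Pull the remote prompts CSV, rebuild the merged slash-command list, and
// reload the core window so the injected cmd.js picks the new list up.
async function refreshPrompts() {
  await invoke('sync_prompts', { time: Date.now() });   // cmd::sync_prompts
  const list = await invoke('cmd_list');                 // cmd::cmd_list
  console.log('available slash commands:', list);
  await invoke('window_reload', { label: 'core' });      // cmd::window_reload
}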

View File

@@ -1,23 +1,25 @@
use anyhow::Result;
use log::info;
use regex::Regex;
use serde_json::Value;
use std::{
collections::HashMap,
fs::{self, File},
path::{Path, PathBuf},
process::Command,
};
use tauri::{utils::config::Config, Manager};
pub fn chat_root() -> PathBuf {
tauri::api::path::home_dir().unwrap().join(".chatgpt")
}
pub fn get_tauri_conf() -> Option<Config> {
let config_file = include_str!("../tauri.conf.json");
let config: Config =
serde_json::from_str(config_file).expect("failed to parse tauri.conf.json");
Some(config)
}
pub fn exists(path: &Path) -> bool {
Path::new(path).exists()
@@ -30,6 +32,14 @@ pub fn create_file(path: &Path) -> Result<File> {
File::create(path).map_err(Into::into)
}
pub fn create_chatgpt_prompts() {
let sync_file = chat_root().join("cache_model").join("chatgpt_prompts.json");
if !exists(&sync_file) {
create_file(&sync_file).unwrap();
fs::write(&sync_file, "[]").unwrap();
}
}
pub fn script_path() -> PathBuf {
let script_file = chat_root().join("main.js");
if !exists(&script_file) {
@@ -81,3 +91,40 @@ pub fn clear_conf(app: &tauri::AppHandle) {
},
);
}
pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
match v {
Value::Object(m) => {
let mut m = m.clone();
for (k, v) in fields {
m.insert(k.clone(), v.clone());
}
Value::Object(m)
}
v => v.clone(),
}
}
pub fn gen_cmd(name: String) -> String {
let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
re.replace_all(&name, "_").to_lowercase()
}
pub async fn get_data(
url: &str,
app: Option<&tauri::AppHandle>,
) -> Result<Option<String>, reqwest::Error> {
let res = reqwest::get(url).await?;
let is_ok = res.status() == 200;
let body = res.text().await?;
if is_ok {
Ok(Some(body))
} else {
info!("chatgpt_http_error: {}", body);
if let Some(v) = app {
tauri::api::dialog::message(v.get_window("core").as_ref(), "ChatGPT HTTP", body);
}
Ok(None)
}
}

View File

@@ -7,20 +7,18 @@
},
"package": {
"productName": "ChatGPT",
"version": "0.7.4"
},
"tauri": {
"allowlist": {
"all": true,
"globalShortcut": {
"all": true
},
"fs": {
"all": true,
"scope": [
"$HOME/.chatgpt/**"
]
}
},
@@ -62,12 +60,6 @@
"webviewInstallMode": { "webviewInstallMode": {
"silent": true, "silent": true,
"type": "embedBootstrapper" "type": "embedBootstrapper"
},
"wix": {
"language": [
"zh-CN",
"en-US"
]
}
}
},
@@ -81,18 +73,6 @@
"https://lencx.github.io/ChatGPT/install.json" "https://lencx.github.io/ChatGPT/install.json"
], ],
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK" "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK"
}, }
"windows": [
{
"label": "main",
"url": "index.html",
"title": "ChatGPT",
"visible": false,
"width": 800,
"height": 600,
"minWidth": 800,
"minHeight": 600
}
]
}
}
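The new `globalShortcut` allowlist entry is what lets a shortcut string like the `global_shortcut` conf field added above be registered at all; a sketch of the value's format using Tauri's JS globalShortcut API (an illustration of the accelerator format only, not how this app wires the shortcut):

import { register, unregister, isRegistered } from '@tauri-apps/api/globalShortcut';

// "CmdOrCtrl+Shift+O"-style accelerator; an empty string means the shortcut is disabled.
async function applyShortcut(shortcut: string) {
  if (!shortcut) return;
  if (await isRegistered(shortcut)) {
    await unregister(shortcut);
  }
  await register(shortcut, () => {
    console.log(`global shortcut ${shortcut} pressed`);
  });
}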

View File

@@ -1,23 +1,53 @@
import { useState, useEffect } from 'react';
import { clone } from 'lodash';
import { invoke } from '@tauri-apps/api';
import { CHAT_MODEL_JSON, CHAT_MODEL_CMD_JSON, readJSON, writeJSON } from '@/utils';
import useInit from '@/hooks/useInit';
export default function useChatModel(key: string, file = CHAT_MODEL_JSON) {
const [modelJson, setModelJson] = useState<Record<string, any>>({});
useInit(async () => {
const data = await readJSON(file, {
defaultVal: { name: 'ChatGPT Model', [key]: null },
});
setModelJson(data);
});
const modelSet = async (data: Record<string, any>[]|Record<string, any>) => {
const oData = clone(modelJson);
oData[key] = data;
await writeJSON(file, oData);
setModelJson(oData);
}
return { modelJson, modelSet, modelData: modelJson?.[key] || [] };
}
export function useCacheModel(file = '') {
const [modelCacheJson, setModelCacheJson] = useState<Record<string, any>[]>([]);
useEffect(() => {
if (!file) return;
(async () => {
const data = await readJSON(file, { isRoot: true, isList: true });
setModelCacheJson(data);
})();
}, [file]);
const modelCacheSet = async (data: Record<string, any>[], newFile = '') => {
await writeJSON(newFile ? newFile : file, data, { isRoot: true });
setModelCacheJson(data);
await modelCacheCmd();
}
const modelCacheCmd = async () => {
// Generate the `chat.model.cmd.json` file and refresh the page for the slash command to take effect.
const list = await invoke('cmd_list');
await writeJSON(CHAT_MODEL_CMD_JSON, { name: 'ChatGPT CMD', last_updated: Date.now(), data: list });
await invoke('window_reload', { label: 'core' });
};
return { modelCacheJson, modelCacheSet, modelCacheCmd };
}
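A rough usage sketch (hypothetical component, not part of the diff): `useChatModel` keeps the sync-source metadata in chat.model.json, while `useCacheModel` pairs a JSON file under `cache_model` with table state and regenerates chat.model.cmd.json on every write:

import { useEffect } from 'react';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import { CHAT_MODEL_JSON } from '@/utils';

export default function Example() {
  const { modelData } = useChatModel('sync_custom', CHAT_MODEL_JSON);
  // The cache file path is illustrative; SyncCustom derives it from the record id.
  const { modelCacheJson, modelCacheSet } = useCacheModel('/absolute/path/cache_model/example.json');

  useEffect(() => {
    console.log('configured sync sources:', modelData);
  }, [modelData]);

  const disableAll = async () => {
    // Writing the cache also rebuilds chat.model.cmd.json and reloads the core window.
    await modelCacheSet(modelCacheJson.map((i) => ({ ...i, enable: false })));
  };

  return <button onClick={disableAll}>Disable all</button>;
}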

src/hooks/useData.ts vendored
View File

@@ -1,7 +1,7 @@
import { useState, useEffect } from 'react';
import { v4 } from 'uuid';
export const safeKey = Symbol('chat-id');
export default function useData(oData: any[]) {
const [opData, setData] = useState<any[]>([]);
@@ -17,6 +17,7 @@ export default function useData(oData: any[]) {
};
const opInit = (val: any[] = []) => {
if (!val || !Array.isArray(val)) return;
const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
setData(nData);
};
@@ -35,5 +36,20 @@ export default function useData(oData: any[]) {
return nData;
};
const opReplaceItems = (ids: string[], data: any) => {
const nData = [...opData];
let count = 0;
for (let i = 0; i < nData.length; i++) {
const v = nData[i];
if (ids.includes(v[safeKey])) {
count++;
nData[i] = { ...v, ...data };
}
if (count === ids.length) break;
}
setData(nData);
return nData;
};
return { opSafeKey: safeKey, opInit, opReplace, opAdd, opRemove, opData, opReplaceItems };
}
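The new `opReplaceItems` exists for bulk edits; a hypothetical wrapper (not part of the diff) showing how a SyncPrompts-style enable/disable of a selection maps onto it:

import useData from '@/hooks/useData';

export function useBulkEnable(rows: any[]) {
  const { opInit, opData, opReplaceItems } = useData(rows);
  const setEnable = (ids: string[], enable: boolean) => {
    // Rows whose safeKey is in `ids` get { enable } merged in; the rest are untouched.
    return opReplaceItems(ids, { enable });
  };
  return { opInit, opData, setEnable };
}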

src/hooks/useTable.tsx vendored Normal file
View File

@@ -0,0 +1,37 @@
import React, { useState } from 'react';
import { Table } from 'antd';
import type { TableRowSelection } from 'antd/es/table/interface';
import { safeKey } from '@/hooks/useData';
export default function useTableRowSelection() {
const [selectedRowKeys, setSelectedRowKeys] = useState<React.Key[]>([]);
const [selectedRowIDs, setSelectedRowIDs] = useState<string[]>([]);
const onSelectChange = (newSelectedRowKeys: React.Key[], selectedRows: Record<string|symbol, any>) => {
const keys = selectedRows.map((i: any) => i[safeKey]);
setSelectedRowIDs(keys);
setSelectedRowKeys(newSelectedRowKeys);
};
const rowSelection: TableRowSelection<Record<string, any>> = {
selectedRowKeys,
onChange: onSelectChange,
selections: [
Table.SELECTION_ALL,
Table.SELECTION_INVERT,
Table.SELECTION_NONE,
],
};
return { rowSelection, selectedRowIDs };
}
export const TABLE_PAGINATION = {
hideOnSinglePage: true,
showSizeChanger: true,
showQuickJumper: true,
defaultPageSize: 5,
pageSizeOptions: [5, 10, 15, 20],
showTotal: (total: number) => <span>Total {total} items</span>,
};
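A minimal wiring sketch (hypothetical component, not part of the diff) showing how the hook plugs into an antd Table the way SyncPrompts uses it:

import { Table } from 'antd';
import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';

export default function PromptTable({ columns, data }: { columns: any[]; data: any[] }) {
  const { rowSelection, selectedRowIDs } = useTable();
  return (
    <>
      <span>Selected {selectedRowIDs.length} items</span>
      <Table
        rowKey="act"
        columns={columns}
        dataSource={data}
        rowSelection={rowSelection}
        pagination={TABLE_PAGINATION}
      />
    </>
  );
}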

View File

@@ -39,6 +39,7 @@ const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
mode="inline" mode="inline"
inlineIndent={12} inlineIndent={12}
items={menuItems} items={menuItems}
defaultOpenKeys={['/model']}
onClick={(i) => go(i.key)} onClick={(i) => go(i.key)}
/> />
</Sider> </Sider>

src/main.scss vendored
View File

@@ -17,4 +17,64 @@
html, body {
padding: 0;
margin: 0;
}
.ant-table-selection-column {
width: 50px !important;
min-width: 50px !important;
}
.chat-prompts-val {
display: inline-block;
width: 100%;
max-width: 300px;
overflow: hidden;
text-overflow: ellipsis;
display: -webkit-box;
-webkit-line-clamp: 3;
-webkit-box-orient: vertical;
}
.chat-add-btn {
margin-bottom: 5px;
}
.chat-prompts-tags {
.ant-tag {
margin: 2px;
}
}
.chat-table-tip {
> span {
line-height: 16px;
}
}
.chat-sync-path {
font-size: 12px;
font-weight: 500;
color: #888;
margin-bottom: 5px;
line-height: 16px;
> div {
max-width: 400px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
color: #2a2a2a;
}
span {
display: inline-block;
// background-color: #d8d8d8;
color: #4096ff;
padding: 0 8px;
height: 20px;
line-height: 20px;
border-radius: 4px;
cursor: pointer;
text-decoration: underline;
}
}

src/routes.tsx vendored
View File

@@ -3,13 +3,16 @@ import {
DesktopOutlined,
BulbOutlined,
SyncOutlined,
FileSyncOutlined,
UserOutlined,
} from '@ant-design/icons';
import type { MenuProps } from 'antd';
import General from '@view/General';
import UserCustom from '@/view/model/UserCustom';
import SyncPrompts from '@/view/model/SyncPrompts';
import SyncCustom from '@/view/model/SyncCustom';
import SyncRecord from '@/view/model/SyncRecord';
export type ChatRouteMetaObject = {
label: string;
@@ -19,7 +22,8 @@ export type ChatRouteMetaObject = {
type ChatRouteObject = {
path: string;
element?: JSX.Element;
hideMenu?: boolean;
meta?: ChatRouteMetaObject;
children?: ChatRouteObject[];
}
@@ -33,7 +37,7 @@ export const routes: Array<ChatRouteObject> = [
},
},
{
path: '/model',
meta: {
label: 'Language Model',
icon: <BulbOutlined />,
@@ -41,7 +45,7 @@ export const routes: Array<ChatRouteObject> = [
children: [
{
path: 'user-custom',
element: <UserCustom />,
meta: {
label: 'User Custom',
icon: <UserOutlined />,
@@ -55,17 +59,33 @@ export const routes: Array<ChatRouteObject> = [
icon: <SyncOutlined />,
},
},
{
path: 'sync-custom',
element: <SyncCustom />,
meta: {
label: 'Sync Custom',
icon: <FileSyncOutlined />,
},
},
{
path: 'sync-custom/:id',
element: <SyncRecord />,
hideMenu: true,
},
]
},
];
type MenuItem = Required<MenuProps>['items'][number];
export const menuItems: MenuItem[] = routes
.filter((j) => !j.hideMenu)
.map(i => ({
...i.meta,
key: i.path || '',
children: i?.children
?.filter((j) => !j.hideMenu)
?.map((j) => ({ ...j.meta, key: `${i.path}/${j.path}` || ''})),
}));
export default () => {
return useRoutes(routes);

src/utils.ts vendored
View File

@@ -1,8 +1,9 @@
import { readTextFile, writeTextFile, exists, createDir } from '@tauri-apps/api/fs';
import { homeDir, join, dirname } from '@tauri-apps/api/path';
import dayjs from 'dayjs';
export const CHAT_MODEL_JSON = 'chat.model.json';
export const CHAT_MODEL_CMD_JSON = 'chat.model.cmd.json';
export const CHAT_PROMPTS_CSV = 'chat.prompts.csv';
export const GITHUB_PROMPTS_CSV_URL = 'https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv';
export const DISABLE_AUTO_COMPLETE = {
@@ -23,14 +24,23 @@ export const chatPromptsPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_PROMPTS_CSV);
}
type readJSONOpts = { defaultVal?: Record<string, any>, isRoot?: boolean, isList?: boolean };
export const readJSON = async (path: string, opts: readJSONOpts = {}) => {
const { defaultVal = {}, isRoot = false, isList = false } = opts;
const root = await chatRoot();
let file = path;
if (!isRoot) {
file = await join(root, path);
}
if (!await exists(file)) {
if (await dirname(file) !== root) {
await createDir(await dirname(file), { recursive: true });
}
await writeTextFile(file, isList ? '[]' : JSON.stringify({
name: 'ChatGPT',
link: 'https://github.com/lencx/ChatGPT',
...defaultVal,
}, null, 2))
}
@@ -42,10 +52,23 @@ export const readJSON = async (path: string, defaultVal = {}) => {
}
}
type writeJSONOpts = { dir?: string, isRoot?: boolean };
export const writeJSON = async (path: string, data: Record<string, any>, opts: writeJSONOpts = {}) => {
const { isRoot = false } = opts;
const root = await chatRoot();
let file = path;
if (!isRoot) {
file = await join(root, path);
}
if (isRoot && !await exists(await dirname(file))) {
await createDir(await dirname(file), { recursive: true });
}
await writeTextFile(file, JSON.stringify(data, null, 2));
}
export const fmtDate = (date: any) => dayjs(date).format('YYYY-MM-DD HH:mm:ss');
export const genCmd = (act: string) => act.replace(/\s+|\/+/g, '_').replace(/[^\d\w]/g, '').toLocaleLowerCase();
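A short usage sketch of the new options (hypothetical call sequence, not part of the diff): `isRoot` treats the path as absolute and creates missing parent directories, `isList` seeds a new file with `[]`, and relative paths still resolve under ~/.chatgpt:

import { join } from '@tauri-apps/api/path';
import { readJSON, writeJSON, chatRoot, genCmd, CHAT_MODEL_CMD_JSON } from '@/utils';

async function example() {
  const cacheFile = await join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
  const cached = await readJSON(cacheFile, { isRoot: true, isList: true });

  // Relative path: written to ~/.chatgpt/chat.model.cmd.json.
  await writeJSON(CHAT_MODEL_CMD_JSON, { name: 'ChatGPT CMD', data: cached });

  console.log(genCmd('Act as a Linux Terminal')); // -> "act_as_a_linux_terminal"
}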

src/view/General.tsx vendored
View File

@@ -1,36 +1,54 @@
import { useEffect, useState } from 'react';
import { Form, Radio, Switch, Input, Button, Space, message, Tooltip } from 'antd';
import { QuestionCircleOutlined } from '@ant-design/icons';
import { invoke, shell, path } from '@tauri-apps/api';
import { platform } from '@tauri-apps/api/os';
import { ask } from '@tauri-apps/api/dialog';
import { relaunch } from '@tauri-apps/api/process';
import { clone, omit, isEqual } from 'lodash';
import useInit from '@/hooks/useInit';
import { DISABLE_AUTO_COMPLETE, chatRoot } from '@/utils';
const OriginLabel = ({ url }: { url: string }) => {
return (
<span>
Switch Origin <Tooltip title={`Default: ${url}`}><QuestionCircleOutlined style={{ color: '#1677ff' }} /></Tooltip>
</span>
)
}
const GlobalShortcut = () => {
return (
<div>
Global Shortcut
{' '}
<Tooltip title={(
<div>
<div>Shortcut definition, modifiers and key separated by "+" e.g. CmdOrControl+Q</div>
<div style={{ margin: '10px 0'}}>If empty, the shortcut is disabled.</div>
<a href="https://tauri.app/v1/api/js/globalshortcut" target="_blank">https://tauri.app/v1/api/js/globalshortcut</a>
</div>
)}>
<QuestionCircleOutlined style={{ color: '#1677ff' }} />
</Tooltip>
</div>
)
}
export default function General() {
const [form] = Form.useForm();
const [jsonPath, setJsonPath] = useState('');
const [platformInfo, setPlatform] = useState<string>('');
const [chatConf, setChatConf] = useState<any>(null);
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'chat.conf.json'));
setPlatform(await platform());
const chatData = await invoke('get_chat_conf');
setChatConf(chatData);
});
useEffect(() => {
form.setFieldsValue(clone(chatConf));
@@ -55,44 +73,54 @@ export default function General() {
};
return (
<>
<div className="chat-table-tip">
<div className="chat-sync-path">
<div>PATH: <a onClick={() => shell.open(jsonPath)} title={jsonPath}>{jsonPath}</a></div>
</div>
</div>
<Form
form={form}
style={{ maxWidth: 500 }}
onFinish={onFinish}
labelCol={{ span: 8 }}
wrapperCol={{ span: 15, offset: 1 }}
>
<Form.Item label="Theme" name="theme">
<Radio.Group>
<Radio value="Light">Light</Radio>
<Radio value="Dark">Dark</Radio>
</Radio.Group>
</Form.Item>
<Form.Item label="Stay On Top" name="stay_on_top" valuePropName="checked">
<Switch />
</Form.Item>
<Form.Item label={<GlobalShortcut />} name="global_shortcut">
<Input placeholder="CmdOrCtrl+Shift+O" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
{platformInfo === 'darwin' && (
<Form.Item label="Titlebar" name="titlebar" valuePropName="checked">
<Switch />
</Form.Item>
)}
<Form.Item label={<OriginLabel url={chatConf?.default_origin} />} name="origin">
<Input placeholder="https://chat.openai.com" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
<Form.Item label="User Agent (Window)" name="ua_window">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item label="User Agent (SystemTray)" name="ua_tray">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item>
<Space size={20}>
<Button onClick={onCancel}>Cancel</Button>
<Button type="primary" htmlType="submit">
Submit
</Button>
</Space>
</Form.Item>
</Form>
</>
)
}

View File

@@ -1,39 +0,0 @@
.chat-prompts-val {
display: inline-block;
width: 100%;
max-width: 300px;
overflow: hidden;
text-overflow: ellipsis;
display: -webkit-box;
-webkit-line-clamp: 3;
-webkit-box-orient: vertical;
}
.chat-prompts-tags {
.ant-tag {
margin: 2px;
}
}
.add-btn {
margin-bottom: 5px;
}
.chat-model-path {
font-size: 12px;
font-weight: bold;
color: #888;
margin-bottom: 5px;
span {
display: inline-block;
// background-color: #d8d8d8;
color: #4096ff;
padding: 0 8px;
height: 20px;
line-height: 20px;
border-radius: 4px;
cursor: pointer;
text-decoration: underline;
}
}

View File

@@ -1,111 +0,0 @@
import { useState, useRef, useEffect } from 'react';
import { Table, Button, Modal, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useChatModel from '@/hooks/useChatModel';
import useColumns from '@/hooks/useColumns';
import { chatModelPath } from '@/utils';
import { modelColumns } from './config';
import LanguageModelForm from './Form';
import './index.scss';
export default function LanguageModel() {
const [isVisible, setVisible] = useState(false);
const [modelPath, setChatModelPath] = useState('');
const { modelData, modelSet } = useChatModel('user_custom');
const { opData, opInit, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(modelColumns());
const formRef = useRef<any>(null);
useEffect(() => {
if (modelData.length <= 0) return;
opInit(modelData);
}, [modelData])
useEffect(() => {
if (!opInfo.opType) return;
if (['edit', 'new'].includes(opInfo.opType)) {
setVisible(true);
}
if (['delete'].includes(opInfo.opType)) {
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelSet(data);
opInfo.resetRecord();
}
}, [opInfo.opType, formRef]);
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelSet(data);
}
}, [opInfo.opTime])
useInit(async () => {
const path = await chatModelPath();
setChatModelPath(path);
})
const hide = () => {
setVisible(false);
opInfo.resetRecord();
};
const handleOk = () => {
formRef.current?.form?.validateFields()
.then((vals: Record<string, any>) => {
if (modelData.map((i: any) => i.cmd).includes(vals.cmd) && opInfo?.opRecord?.cmd !== vals.cmd) {
message.warning(`"cmd: /${vals.cmd}" already exists, please change the "${vals.cmd}" name and resubmit.`);
return;
}
let data = [];
switch (opInfo.opType) {
case 'new': data = opAdd(vals); break;
case 'edit': data = opReplace(opInfo?.opRecord?.[opSafeKey], vals); break;
default: break;
}
modelSet(data)
hide();
})
};
const handleOpenFile = () => {
invoke('open_file', { path: modelPath });
};
const modalTitle = `${({ new: 'Create', edit: 'Edit' })[opInfo.opType]} Language Model`;
return (
<div>
<Button className="add-btn" type="primary" onClick={opInfo.opNew}>Add Model</Button>
<div className="chat-model-path">PATH: <span onClick={handleOpenFile}>{modelPath}</span></div>
<Table
key={opInfo.opTime}
rowKey="cmd"
columns={columns}
scroll={{ x: 'auto' }}
dataSource={opData}
pagination={{
hideOnSinglePage: true,
showSizeChanger: true,
showQuickJumper: true,
defaultPageSize: 5,
pageSizeOptions: [5, 10, 15, 20],
showTotal: (total) => <span>Total {total} items</span>,
}}
/>
<Modal
open={isVisible}
onCancel={hide}
title={modalTitle}
onOk={handleOk}
destroyOnClose
maskClosable={false}
>
<LanguageModelForm record={opInfo?.opRecord} ref={formRef} />
</Modal>
</div>
)
}

View File

@@ -1,28 +0,0 @@
.chat-prompts-tags {
.ant-tag {
margin: 2px;
}
}
.add-btn {
margin-bottom: 5px;
}
.chat-model-path {
font-size: 12px;
font-weight: bold;
color: #888;
margin-bottom: 5px;
span {
display: inline-block;
// background-color: #d8d8d8;
color: #4096ff;
padding: 0 8px;
height: 20px;
line-height: 20px;
border-radius: 4px;
cursor: pointer;
text-decoration: underline;
}
}

View File

@@ -1,91 +0,0 @@
import { useEffect, useState } from 'react';
import { Table, Button, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import { fetch, ResponseType } from '@tauri-apps/api/http';
import { writeTextFile, readTextFile } from '@tauri-apps/api/fs';
import useInit from '@/hooks/useInit';
import useColumns from '@/hooks/useColumns';
import useData from '@/hooks/useData';
import useChatModel from '@/hooks/useChatModel';
import { fmtDate, chatPromptsPath, GITHUB_PROMPTS_CSV_URL } from '@/utils';
import { modelColumns, genCmd } from './config';
import './index.scss';
const promptsURL = 'https://github.com/f/awesome-chatgpt-prompts/blob/main/prompts.csv';
export default function LanguageModel() {
const [loading, setLoading] = useState(false);
const [lastUpdated, setLastUpdated] = useState();
const { modelJson, modelSet } = useChatModel('sys_sync_prompts');
const { opData, opInit, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(modelColumns());
// useInit(async () => {
// // const filename = await chatPromptsPath();
// // const data = await readTextFile(filename);
// // const list: Record<string, string>[] = await invoke('parse_prompt', { data });
// // const fileData: Record<string, any> = await invoke('metadata', { path: filename });
// // setLastUpdated(fileData.accessedAtMs);
// // opInit(list);
// console.log('«31» /view/SyncPrompts/index.tsx ~> ', modelJson);
// opInit([]);
// })
useEffect(() => {
if (!modelJson?.sys_sync_prompts) return;
opInit(modelJson?.sys_sync_prompts)
}, [modelJson?.sys_sync_prompts])
const handleSync = async () => {
setLoading(true);
const res = await fetch(GITHUB_PROMPTS_CSV_URL, {
method: 'GET',
responseType: ResponseType.Text,
});
const data = (res.data || '') as string;
if (res.ok) {
// const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
await writeTextFile(await chatPromptsPath(), data);
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
opInit(list);
modelSet(list.map(i => ({ cmd: genCmd(i.act), enable: true, tags: ['chatgpt-prompts'], ...i })));
setLastUpdated(fmtDate(Date.now()) as any);
message.success('ChatGPT Prompts data has been synchronized!');
} else {
message.error('ChatGPT Prompts data sync failed, please try again!');
}
setLoading(false);
};
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelSet(data);
}
}, [opInfo.opTime]);
return (
<div>
<Button type="primary" loading={loading} onClick={handleSync}>Sync</Button>
{lastUpdated && <span style={{ marginLeft: 10, color: '#999' }}>Last updated on {fmtDate(lastUpdated)}</span>}
<div className="chat-model-path">URL: <a href={promptsURL} target="_blank">{promptsURL}</a></div>
<Table
key={lastUpdated}
rowKey="act"
columns={columns}
scroll={{ x: 'auto' }}
dataSource={opData}
pagination={{
hideOnSinglePage: true,
showSizeChanger: true,
showQuickJumper: true,
defaultPageSize: 5,
pageSizeOptions: [5, 10, 15, 20],
showTotal: (total) => <span>Total {total} items</span>,
}}
/>
</div>
)
}

src/view/model/SyncCustom/Form.tsx vendored Normal file
View File

@@ -0,0 +1,112 @@
import { useEffect, useState, ForwardRefRenderFunction, useImperativeHandle, forwardRef } from 'react';
import { Form, Input, Select, Tooltip } from 'antd';
import { v4 } from 'uuid';
import type { FormProps } from 'antd';
import { DISABLE_AUTO_COMPLETE, chatRoot } from '@/utils';
import useInit from '@/hooks/useInit';
interface SyncFormProps {
record?: Record<string|symbol, any> | null;
type: string;
}
const initFormValue = {
act: '',
enable: true,
tags: [],
prompt: '',
};
const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record, type }, ref) => {
const isDisabled = type === 'edit';
const [form] = Form.useForm();
useImperativeHandle(ref, () => ({ form }));
const [root, setRoot] = useState('');
useInit(async () => {
setRoot(await chatRoot());
});
useEffect(() => {
if (record) {
form.setFieldsValue(record);
}
}, [record]);
const pathOptions = (
<Form.Item noStyle name="protocol" initialValue="https">
<Select disabled={isDisabled}>
<Select.Option value="local">{root}</Select.Option>
<Select.Option value="http">http://</Select.Option>
<Select.Option value="https">https://</Select.Option>
</Select>
</Form.Item>
);
const extOptions = (
<Form.Item noStyle name="ext" initialValue="json">
<Select disabled={isDisabled}>
<Select.Option value="csv">.csv</Select.Option>
<Select.Option value="json">.json</Select.Option>
</Select>
</Form.Item>
);
const jsonTip = (
<Tooltip
title={<pre>{JSON.stringify([
{ cmd: '', act: '', prompt: '' },
{ cmd: '', act: '', prompt: '' },
], null, 2)}</pre>}
>
<a>JSON</a>
</Tooltip>
);
const csvTip = (
<Tooltip
title={<pre>{`"cmd","act","prompt"
"cmd","act","prompt"
"cmd","act","prompt"
"cmd","act","prompt"`}</pre>}
>
<a>CSV</a>
</Tooltip>
);
return (
<>
<Form
form={form}
labelCol={{ span: 4 }}
initialValues={initFormValue}
>
<Form.Item
label="Name"
name="name"
rules={[{ required: true, message: 'Please input name!' }]}
>
<Input placeholder="Please input name" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
<Form.Item
label="PATH"
name="path"
rules={[{ required: true, message: 'Please input path!' }]}
>
<Input
placeholder="YOUR_PATH"
addonBefore={pathOptions}
addonAfter={extOptions}
{...DISABLE_AUTO_COMPLETE}
/>
</Form.Item>
<Form.Item style={{ display: 'none' }} name="id" initialValue={v4().replace(/-/g, '')}><input /></Form.Item>
</Form>
<div className="tip">
<p>The file supports only {csvTip} and {jsonTip} formats.</p>
</div>
</>
)
}
export default forwardRef(SyncForm);

src/view/model/SyncCustom/config.tsx vendored Normal file
View File

@@ -0,0 +1,89 @@
import { useState } from 'react';
import { Tag, Space, Popconfirm } from 'antd';
import { HistoryOutlined } from '@ant-design/icons';
import { shell, path } from '@tauri-apps/api';
import { Link } from 'react-router-dom';
import useInit from '@/hooks/useInit';
import { chatRoot, fmtDate } from '@/utils';
export const syncColumns = () => [
{
title: 'Name',
dataIndex: 'name',
key: 'name',
width: 100,
},
{
title: 'Protocol',
dataIndex: 'protocol',
key: 'protocol',
width: 80,
render: (v: string) => <Tag>{v}</Tag>,
},
{
title: 'PATH',
dataIndex: 'path',
key: 'path',
width: 180,
render: (_: string, row: any) => <RenderPath row={row} />
},
{
title: 'Last updated',
dataIndex: 'last_updated',
key: 'last_updated',
width: 140,
render: (v: number) => (
<div>
<HistoryOutlined style={{ marginRight: 5, color: v ? '#52c41a' : '#ff4d4f' }} />
{ v ? fmtDate(v) : ''}
</div>
),
},
{
title: 'Action',
fixed: 'right',
width: 150,
render: (_: any, row: any, actions: any) => {
return (
<Space>
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
onConfirm={() => actions.setRecord(row, 'sync')}
okText="Yes"
cancelText="No"
>
<a>Sync</a>
</Popconfirm>
{row.last_updated && <Link to={`${row.id}`} state={row}>View</Link>}
<a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
<Popconfirm
title="Are you sure to delete this path?"
onConfirm={() => actions.setRecord(row, 'delete')}
okText="Yes"
cancelText="No"
>
<a>Delete</a>
</Popconfirm>
</Space>
)
}
}
];
const RenderPath = ({ row }: any) => {
const [filePath, setFilePath] = useState('');
useInit(async () => {
setFilePath(await getPath(row));
})
return <a onClick={() => shell.open(filePath)}>{filePath}</a>
};
export const getPath = async (row: any) => {
if (!/^http/.test(row.protocol)) {
return await path.join(await chatRoot(), row.path) + `.${row.ext}`;
} else {
return `${row.protocol}://${row.path}.${row.ext}`;
}
}

src/view/model/SyncCustom/index.tsx vendored Normal file
View File

@@ -0,0 +1,139 @@
import { useState, useRef, useEffect } from 'react';
import { Table, Modal, Button, message } from 'antd';
import { invoke, path, fs } from '@tauri-apps/api';
import useData from '@/hooks/useData';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useColumns from '@/hooks/useColumns';
import { TABLE_PAGINATION } from '@/hooks/useTable';
import { CHAT_MODEL_JSON, chatRoot, readJSON, genCmd } from '@/utils';
import { syncColumns, getPath } from './config';
import SyncForm from './Form';
const fmtData = (data: Record<string, any>[] = []) => (Array.isArray(data) ? data : []).map((i) => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), tags: ['user-sync'], enable: true }));
export default function SyncCustom() {
const [isVisible, setVisible] = useState(false);
const { modelData, modelSet } = useChatModel('sync_custom', CHAT_MODEL_JSON);
const { modelCacheCmd, modelCacheSet } = useCacheModel();
const { opData, opInit, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
const formRef = useRef<any>(null);
const hide = () => {
setVisible(false);
opInfo.resetRecord();
};
useEffect(() => {
if (modelData.length <= 0) return;
opInit(modelData);
}, [modelData]);
useEffect(() => {
if (!opInfo.opType) return;
if (opInfo.opType === 'sync') {
const filename = `${opInfo?.opRecord?.id}.json`;
handleSync(filename).then((isOk: boolean) => {
opInfo.resetRecord();
if (!isOk) return;
const data = opReplace(opInfo?.opRecord?.[opSafeKey], { ...opInfo?.opRecord, last_updated: Date.now() });
modelSet(data);
opInfo.resetRecord();
});
}
if (['edit', 'new'].includes(opInfo.opType)) {
setVisible(true);
}
if (['delete'].includes(opInfo.opType)) {
(async () => {
try {
const file = await path.join(await chatRoot(), 'cache_model', `${opInfo?.opRecord?.id}.json`);
await fs.removeFile(file);
} catch(e) {}
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelSet(data);
opInfo.resetRecord();
modelCacheCmd();
})();
}
}, [opInfo.opType, formRef]);
const handleSync = async (filename: string) => {
const record = opInfo?.opRecord;
const isJson = /json$/.test(record?.ext);
const file = await path.join(await chatRoot(), 'cache_model', filename);
const filePath = await getPath(record);
// https or http
if (/^http/.test(record?.protocol)) {
const data = await invoke('sync_user_prompts', { url: filePath, dataType: record?.ext });
if (data) {
await modelCacheSet(data as [], file);
await modelCacheCmd();
message.success('ChatGPT Prompts data has been synchronized!');
return true;
} else {
message.error('ChatGPT Prompts data sync failed, please try again!');
return false;
}
}
// local
if (isJson) {
// parse json
const data = await readJSON(filePath, { isRoot: true });
await modelCacheSet(fmtData(data), file);
} else {
// parse csv
const data = await fs.readTextFile(filePath);
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
await modelCacheSet(fmtData(list), file);
}
await modelCacheCmd();
return true;
};
const handleOk = () => {
formRef.current?.form?.validateFields()
.then((vals: Record<string, any>) => {
let data = [];
switch (opInfo.opType) {
case 'new': data = opAdd(vals); break;
case 'edit': data = opReplace(opInfo?.opRecord?.[opSafeKey], vals); break;
default: break;
}
modelSet(data);
hide();
})
};
return (
<div>
<Button
className="chat-add-btn"
type="primary"
onClick={opInfo.opNew}
>
Add PATH
</Button>
<Table
key="id"
rowKey="name"
columns={columns}
scroll={{ x: 800 }}
dataSource={opData}
pagination={TABLE_PAGINATION}
/>
<Modal
open={isVisible}
onCancel={hide}
title="Sync PATH"
onOk={handleOk}
destroyOnClose
maskClosable={false}
>
<SyncForm ref={formRef} record={opInfo?.opRecord} type={opInfo.opType} />
</Modal>
</div>
)
}

View File

@@ -1,8 +1,8 @@
import { Switch, Tag, Tooltip } from 'antd';
import { genCmd } from '@/utils';
export const syncColumns = () => [
{
title: '/{cmd}',
dataIndex: 'cmd',

src/view/model/SyncPrompts/index.scss vendored Normal file
View File

@@ -0,0 +1,12 @@
.chat-table-tip, .chat-table-btns {
display: flex;
justify-content: space-between;
}
.chat-table-btns {
margin-bottom: 5px;
.num {
margin-left: 10px;
}
}

src/view/model/SyncPrompts/index.tsx vendored Normal file
View File

@@ -0,0 +1,99 @@
import { useEffect, useState } from 'react';
import { Table, Button, Popconfirm } from 'antd';
import { invoke, path, shell } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
import { fmtDate, chatRoot } from '@/utils';
import { syncColumns } from './config';
import './index.scss';
const promptsURL = 'https://github.com/f/awesome-chatgpt-prompts/blob/main/prompts.csv';
export default function SyncPrompts() {
const { rowSelection, selectedRowIDs } = useTable();
const [jsonPath, setJsonPath] = useState('');
const { modelJson, modelSet } = useChatModel('sync_prompts');
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { opData, opInit, opReplace, opReplaceItems, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
const lastUpdated = modelJson?.sync_prompts?.last_updated;
const selectedItems = rowSelection.selectedRowKeys || [];
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json'));
});
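// once the cached prompts JSON has been read, seed the table with it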
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
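// invokes the Rust-side 'sync_prompts' command; on success the returned prompt list replaces the table data and last_updated is stamped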
const handleSync = async () => {
const data = await invoke('sync_prompts', { time: Date.now() });
if (data) {
opInit(data as any[]);
modelSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
}
};
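// persist a single row's enable/disable toggle fired from the table switch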
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
}
}, [opInfo.opTime]);
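// batch enable/disable for the rows currently selected in the table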
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable })
modelCacheSet(data);
};
return (
<div>
<div className="chat-table-btns">
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
placement="topLeft"
onConfirm={handleSync}
okText="Yes"
cancelText="No"
>
<Button type="primary">Sync</Button>
</Popconfirm>
<div>
{selectedItems.length > 0 && (
<>
<Button type="primary" onClick={() => handleEnable(true)}>Enable</Button>
<Button onClick={() => handleEnable(false)}>Disable</Button>
<span className="num">Selected {selectedItems.length} items</span>
</>
)}
</div>
</div>
<div className="chat-table-tip">
<div className="chat-sync-path">
<div>PATH: <a onClick={() => shell.open(promptsURL)} target="_blank" title={promptsURL}>f/awesome-chatgpt-prompts/prompts.csv</a></div>
<div>CACHE: <a onClick={() => shell.open(jsonPath)} target="_blank" title={jsonPath}>{jsonPath}</a></div>
</div>
{lastUpdated && <span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>Last updated on {fmtDate(lastUpdated)}</span>}
</div>
<Table
key={lastUpdated}
rowKey="act"
columns={columns}
scroll={{ x: 'auto' }}
dataSource={opData}
rowSelection={rowSelection}
pagination={TABLE_PAGINATION}
/>
</div>
)
}

src/view/model/SyncRecord/config.tsx (new file, 49 lines)

@@ -0,0 +1,49 @@
import { Switch, Tag, Tooltip } from 'antd';
import { genCmd } from '@/utils';
export const syncColumns = () => [
{
title: '/{cmd}',
dataIndex: 'cmd',
fixed: 'left',
// width: 120,
key: 'cmd',
render: (_: string, row: Record<string, string>) => (
<Tag color="#2a2a2a">/{row.cmd ? row.cmd : genCmd(row.act)}</Tag>
),
},
{
title: 'Act',
dataIndex: 'act',
key: 'act',
// width: 200,
},
{
title: 'Tags',
dataIndex: 'tags',
key: 'tags',
// width: 150,
render: (v: string[]) => (
<span className="chat-prompts-tags">{v?.map(i => <Tag key={i}>{i}</Tag>)}</span>
),
},
{
title: 'Enable',
dataIndex: 'enable',
key: 'enable',
// width: 80,
render: (v: boolean = false, row: Record<string, any>, action: Record<string, any>) => (
<Switch checked={v} onChange={(v) => action.setRecord({ ...row, enable: v }, 'enable')} />
),
},
{
title: 'Prompt',
dataIndex: 'prompt',
key: 'prompt',
// width: 300,
render: (v: string) => (
<Tooltip overlayInnerStyle={{ width: 350 }} title={v}><span className="chat-prompts-val">{v}</span></Tooltip>
),
},
];

src/view/model/SyncRecord/index.tsx (new file, 85 lines)

@@ -0,0 +1,85 @@
import { useEffect, useState } from 'react';
import { useLocation } from 'react-router-dom';
import { ArrowLeftOutlined } from '@ant-design/icons';
import { Table, Button } from 'antd';
import { shell, path } from '@tauri-apps/api';
import useColumns from '@/hooks/useColumns';
import useData from '@/hooks/useData';
import { useCacheModel } from '@/hooks/useChatModel';
import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
import { fmtDate, chatRoot } from '@/utils';
import { getPath } from '@/view/model/SyncCustom/config';
import { syncColumns } from './config';
import useInit from '@/hooks/useInit';
export default function SyncRecord() {
const location = useLocation();
const [filePath, setFilePath] = useState('');
const [jsonPath, setJsonPath] = useState('');
const state = location?.state;
const { rowSelection, selectedRowIDs } = useTable();
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { opData, opInit, opReplace, opReplaceItems, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
const selectedItems = rowSelection.selectedRowKeys || [];
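// resolve the source file path and its JSON cache from the record passed in via router state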
useInit(async () => {
setFilePath(await getPath(state));
setJsonPath(await path.join(await chatRoot(), 'cache_model', `${state?.id}.json`));
})
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
}
}, [opInfo.opTime]);
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable })
modelCacheSet(data);
};
return (
<div>
<div className="chat-table-btns">
<div>
<Button shape="round" icon={<ArrowLeftOutlined />} onClick={() => history.back()} />
</div>
<div>
{selectedItems.length > 0 && (
<>
<Button type="primary" onClick={() => handleEnable(true)}>Enable</Button>
<Button onClick={() => handleEnable(false)}>Disable</Button>
<span className="num">Selected {selectedItems.length} items</span>
</>
)}
</div>
</div>
<div className="chat-table-tip">
<div className="chat-sync-path">
<div>PATH: <a onClick={() => shell.open(filePath)} target="_blank" title={filePath}>{filePath}</a></div>
<div>CACHE: <a onClick={() => shell.open(jsonPath)} target="_blank" title={jsonPath}>{jsonPath}</a></div>
</div>
{state?.last_updated && <span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>Last updated on {fmtDate(state?.last_updated)}</span>}
</div>
<Table
key="prompt"
rowKey="act"
columns={columns}
scroll={{ x: 'auto' }}
dataSource={opData}
rowSelection={rowSelection}
pagination={TABLE_PAGINATION}
/>
</div>
)
}


@@ -5,7 +5,7 @@ import type { FormProps } from 'antd';
 import Tags from '@comps/Tags';
 import { DISABLE_AUTO_COMPLETE } from '@/utils';
-interface LanguageModelProps {
+interface UserCustomFormProps {
 record?: Record<string|symbol, any> | null;
 }
@@ -16,7 +16,7 @@ const initFormValue = {
 prompt: '',
 };
-const LanguageModel: ForwardRefRenderFunction<FormProps, LanguageModelProps> = ({ record }, ref) => {
+const UserCustomForm: ForwardRefRenderFunction<FormProps, UserCustomFormProps> = ({ record }, ref) => {
 const [form] = Form.useForm();
 useImperativeHandle(ref, () => ({ form }));
@@ -63,4 +63,4 @@ const LanguageModel: ForwardRefRenderFunction<FormProps, LanguageModelProps> = (
 )
 }
-export default forwardRef(LanguageModel);
+export default forwardRef(UserCustomForm);

src/view/model/UserCustom/index.tsx (new file, 139 lines)

@@ -0,0 +1,139 @@
import { useState, useRef, useEffect } from 'react';
import { Table, Button, Modal, message } from 'antd';
import { shell, path } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useColumns from '@/hooks/useColumns';
import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
import { chatRoot, fmtDate } from '@/utils';
import { modelColumns } from './config';
import UserCustomForm from './Form';
export default function LanguageModel() {
const { rowSelection, selectedRowIDs } = useTable();
const [isVisible, setVisible] = useState(false);
const [jsonPath, setJsonPath] = useState('');
const { modelJson, modelSet } = useChatModel('user_custom');
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { opData, opInit, opReplaceItems, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(modelColumns());
const lastUpdated = modelJson?.user_custom?.last_updated;
const selectedItems = rowSelection.selectedRowKeys || [];
const formRef = useRef<any>(null);
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'cache_model', 'user_custom.json'));
});
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
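// seed the table from the cached user_custom.json once it has been read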
useEffect(() => {
if (!opInfo.opType) return;
if (['edit', 'new'].includes(opInfo.opType)) {
setVisible(true);
}
if (['delete'].includes(opInfo.opType)) {
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelCacheSet(data);
opInfo.resetRecord();
}
}, [opInfo.opType, formRef]);
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
}
}, [opInfo.opTime]);
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable })
modelCacheSet(data);
};
const hide = () => {
setVisible(false);
opInfo.resetRecord();
};
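// reject duplicate "/{cmd}" names, persist the change, and stamp last_updated before closing the modal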
const handleOk = () => {
formRef.current?.form?.validateFields()
.then(async (vals: Record<string, any>) => {
if (modelCacheJson.map((i: any) => i.cmd).includes(vals.cmd) && opInfo?.opRecord?.cmd !== vals.cmd) {
message.warning(`"cmd: /${vals.cmd}" already exists, please change the "${vals.cmd}" name and resubmit.`);
return;
}
let data = [];
switch (opInfo.opType) {
case 'new': data = opAdd(vals); break;
case 'edit': data = opReplace(opInfo?.opRecord?.[opSafeKey], vals); break;
default: break;
}
await modelCacheSet(data);
opInit(data);
modelSet({
id: 'user_custom',
last_updated: Date.now(),
});
hide();
})
};
const modalTitle = `${({ new: 'Create', edit: 'Edit' })[opInfo.opType]} Model`;
return (
<div>
<div className="chat-table-btns">
<Button className="chat-add-btn" type="primary" onClick={opInfo.opNew}>Add Model</Button>
<div>
{selectedItems.length > 0 && (
<>
<Button type="primary" onClick={() => handleEnable(true)}>Enable</Button>
<Button onClick={() => handleEnable(false)}>Disable</Button>
<span className="num">Selected {selectedItems.length} items</span>
</>
)}
</div>
</div>
{/* <div className="chat-model-path">PATH: <span onClick={handleOpenFile}>{modelPath}</span></div> */}
<div className="chat-table-tip">
<div className="chat-sync-path">
<div>CACHE: <a onClick={() => shell.open(jsonPath)} title={jsonPath}>{jsonPath}</a></div>
</div>
{lastUpdated && <span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>Last updated on {fmtDate(lastUpdated)}</span>}
</div>
<Table
key={lastUpdated}
rowKey="cmd"
columns={columns}
scroll={{ x: 'auto' }}
dataSource={opData}
rowSelection={rowSelection}
pagination={TABLE_PAGINATION}
/>
<Modal
open={isVisible}
onCancel={hide}
title={modalTitle}
onOk={handleOk}
destroyOnClose
maskClosable={false}
>
<UserCustomForm record={opInfo?.opRecord} ref={formRef} />
</Modal>
</div>
)
}