Compare commits

...

191 Commits

Author SHA1 Message Date
lencx
51344ebe00 v0.8.1 2023-01-12 19:29:21 +08:00
lencx
b3589257ae v0.8.1 2023-01-12 19:29:05 +08:00
lencx
9c960b2a8e v0.8.1 2023-01-12 19:28:24 +08:00
lencx
080a77031d v0.8.1 2023-01-12 19:28:16 +08:00
lencx
71b37f690c fix: export (#126 #185 #189) 2023-01-12 19:21:48 +08:00
lencx
9fa0fe8e6c readme 2023-01-11 00:10:18 +08:00
lencx
1045f0a689 v0.8.0 2023-01-07 15:30:12 +08:00
lencx
5062769956 Merge pull request #169 from lencx/dev 2023-01-07 15:28:59 +08:00
lencx
b791044a22 v0.8.0 2023-01-07 15:25:08 +08:00
lencx
81ff2b89f1 v0.8.1 2023-01-07 14:57:41 +08:00
lencx
ee576b9a69 fix: winget release 2023-01-07 14:57:28 +08:00
lencx
d7ab912917 fix: winget release 2023-01-07 14:53:18 +08:00
lencx
fc74ce30ed fix: winget release 2023-01-07 14:46:34 +08:00
lencx
fad7c5221b fix: winget release 2023-01-07 14:38:16 +08:00
lencx
3e4a4a3031 v0.8.0 2023-01-07 13:15:22 +08:00
lencx
8863e4575c chore: dalle2 2023-01-07 13:14:03 +08:00
lencx
94a8112d45 readme 2023-01-07 12:25:30 +08:00
lencx
7ee9b0c716 chore: dalle2 2023-01-07 12:11:13 +08:00
lencx
b97d3a55f2 chore: dalle2 2023-01-07 00:04:37 +08:00
lencx
a3b40f7f40 chore: dalle2 2023-01-06 23:48:53 +08:00
lencx
de5533d942 chore: dalle2 2023-01-06 22:58:26 +08:00
lencx
8022594ace readme 2023-01-06 19:43:22 +08:00
lencx
ec20d03c50 readme 2023-01-06 18:44:35 +08:00
lencx
bb6e431bd9 readme 2023-01-06 16:29:59 +08:00
lencx
8c2303dec9 chore: dalle2 2023-01-06 14:39:25 +08:00
lencx
d0df8df108 Merge branch 'main' into dev 2023-01-06 10:47:55 +08:00
lencx
a7cff0df66 fix: build error 2023-01-06 10:45:37 +08:00
lencx
cd2b7832d4 Merge pull request #155 from Irene-123/chore-dalle2 2023-01-06 08:36:07 +08:00
kirti purohit
3919b24df8 chore: dalle2(#122) 2023-01-05 21:53:49 +05:30
lencx
fc25746c2d Merge pull request #154 from manudss/patch-1 2023-01-05 20:48:38 +08:00
Emmanuel De Saint Steban
ab0999d7d7 chore(release.yml): Add publish to winget in release actions
See information about this github actions here : https://github.com/marketplace/actions/winget-releaser 

ref : #142
2023-01-05 10:48:23 +01:00
lencx
c2b0e02b75 merge 2023-01-05 17:21:18 +08:00
lencx
a6e746d27e merge 2023-01-05 17:18:47 +08:00
lencx
a0896c9799 readme 2023-01-05 14:57:50 +08:00
lencx
a87654e427 readme 2023-01-05 13:32:41 +08:00
lencx
8ad7a8a9b0 Merge pull request #153 from yixinBC/yixinBC-patch-1 2023-01-05 13:22:41 +08:00
lencx
9f3c72ec6d chore: style 2023-01-05 13:18:35 +08:00
yixinBC
13acb1d56e Merge branch 'lencx:main' into yixinBC-patch-1 2023-01-05 12:44:21 +08:00
lencx
d982a09870 Merge pull request #152 from tk103331/main 2023-01-05 12:43:56 +08:00
yixinBC
1dee42dc79 readme: AUR installation 2023-01-05 12:40:21 +08:00
lencx
e3c9b16de3 Merge branch 'main' into dev 2023-01-05 12:38:58 +08:00
tk
d657a6e262 Merge branch 'lencx:main' into main 2023-01-05 12:36:51 +08:00
lencx
e0c4584529 Merge pull request #151 from tk103331/auto_update 2023-01-05 12:29:54 +08:00
lencx
f812d5ab04 readme 2023-01-05 12:24:02 +08:00
lencx
c1cec366fb readme 2023-01-05 11:53:29 +08:00
tk
3e300c66c3 Merge branch 'lencx:main' into main 2023-01-05 11:52:49 +08:00
tk103331
f7335d9162 control center theme 2023-01-05 11:52:23 +08:00
lencx
e7ed106a01 readme 2023-01-05 11:48:43 +08:00
lencx
cf09dbe429 readme 2023-01-05 11:45:01 +08:00
lencx
73a91df77e Merge pull request #148 from yixinBC/main 2023-01-05 11:26:20 +08:00
yixinBC
fc6912eb59 Revert "readme:install conflict"
This reverts commit 2f0e617b1a.
2023-01-05 10:52:02 +08:00
tk103331
f646684f4d auto update policy 2023-01-05 10:39:46 +08:00
yixinBC
2f0e617b1a readme:install conflict 2023-01-05 10:15:41 +08:00
yixinBC
3e9c2f8e94 resdme:zh_cn 2023-01-05 09:59:15 +08:00
yixinBC
8689082c7b readme:install 2023-01-05 09:41:09 +08:00
lencx
aa98d7dd2a chore: fmt 2023-01-04 23:56:57 +08:00
lencx
a361ce52b5 chore: dalle2 2023-01-04 23:18:41 +08:00
lencx
594260ce5d feat: dalle2 (#122) 2023-01-04 22:35:37 +08:00
lencx
53a240953f chore: spawn 2023-01-04 22:35:37 +08:00
lencx
4c86477d6f chore: window 2023-01-04 22:35:37 +08:00
lencx
e2235e7060 chore: log level 2023-01-04 22:35:37 +08:00
lencx
3dd49cd5d3 fix: tauri updater 2023-01-04 21:44:23 +08:00
lencx
65bb811f15 Merge pull request #147 from yixinBC/main 2023-01-04 17:58:04 +08:00
yixinBC
ce60c0566f fix:typo 2023-01-04 17:48:20 +08:00
yixinBC
0ab4832349 update bug-report template 2023-01-04 17:46:23 +08:00
lencx
5286de2f1e Merge pull request #145 from tk103331/main 2023-01-04 17:16:46 +08:00
tk
197d458c78 Merge branch 'lencx:main' into main 2023-01-04 16:58:47 +08:00
tk103331
845229d629 follow system theme 2023-01-04 16:55:34 +08:00
lencx
30099f730f Merge pull request #144 from tk103331/main 2023-01-04 13:53:57 +08:00
tk
f26dace129 fix 2023-01-04 13:30:51 +08:00
tk
0a434f1add Merge branch 'lencx:main' into main 2023-01-04 13:20:15 +08:00
tk103331
883f36b26d Manually check for updates in 'Control Center' 2023-01-04 13:19:01 +08:00
tk103331
a7cd73b314 Add 'Auto Check Update' config to control whether to check for updates on start 2023-01-04 13:10:46 +08:00
tk103331
c19698bc41 Manually check for updates 2023-01-04 12:43:26 +08:00
lencx
0197d12119 readme 2023-01-04 11:29:41 +08:00
lencx
038b13dd31 Merge pull request #135 from Irene-123/updated-frontend-readme 2023-01-04 09:49:25 +08:00
lencx
c7a6cfc897 Merge pull request #141 from yixinBC/main 2023-01-04 09:41:45 +08:00
yixinBC
a6912deb9f add issue templates for docs 2023-01-04 09:34:40 +08:00
yixinBC
709029a6c6 Merge branch 'main' of https://github.com/yixinBC/ChatGPT 2023-01-04 09:26:37 +08:00
yixinBC
060f3b5915 fix:typo 2023-01-04 09:22:10 +08:00
kirti purohit
139afa2943 removed numbering & rectified html 2023-01-03 22:20:06 +05:30
kirti purohit
cef8ffc1a7 fix: headers, license 2023-01-03 17:12:25 +05:30
lencx
693b83e0c6 Merge pull request #132 from yixinBC/main 2023-01-03 18:47:57 +08:00
yixinBC
b35b035c09 add issue templates 2023-01-03 18:39:43 +08:00
lencx
b8614e73ff Merge pull request #130 from tk103331/main 2023-01-03 17:43:47 +08:00
tiankai
6bd52fe961 Add "Reset to defaults" in Control Center 2023-01-03 17:35:20 +08:00
lencx
b370fd187c Merge pull request #128 from turric4n/patch-1 2023-01-03 16:28:31 +08:00
Turrican
9333800002 Update AWESOME.md 2023-01-03 08:50:58 +01:00
lencx
b76d141dbc add todo 2023-01-02 18:24:35 +08:00
lencx
b66fbd617d readme 2023-01-02 18:11:59 +08:00
lencx
802a201d82 readme 2023-01-02 18:10:02 +08:00
lencx
508752691c readme 2023-01-01 20:09:10 +08:00
lencx
484f1d6921 readme 2023-01-01 17:40:06 +08:00
lencx
a27876b7a9 readme 2023-01-01 16:23:43 +08:00
lencx
7c4f0bf67f Merge pull request #121 from yixinBC/yixinBC-patch-1 2023-01-01 13:12:08 +08:00
Bu Yixin
41cc76d557 make AppImage the default Linux installation file
Almost all Linux distributions can run AppImage file,however, only those Linux distributions which has  `dpkg` command can install a deb file.BTW, AppImage contains all the dependencies and library files that app needs while running,which can avoid many local library link errors.
2023-01-01 12:51:52 +08:00
lencx
a6bac89a87 Merge pull request #119 from lencx/doc 2022-12-31 22:38:43 +08:00
lencx
b7591c0b41 readme 2022-12-31 22:38:04 +08:00
lencx
ffa8ff1d03 Merge pull request #118 from lencx/revert-117-doc 2022-12-31 21:39:30 +08:00
lencx
bac33c4689 Revert "doc: readme" 2022-12-31 21:37:36 +08:00
lencx
bce27c1e39 Merge pull request #117 from lencx/doc 2022-12-31 21:33:33 +08:00
Vinchan
5e1295677c doc: readme zh-cn 2022-12-31 21:28:45 +08:00
lencx
51891e8a8a v0.7.4 2022-12-31 21:28:45 +08:00
lencx
10993667ab readme 2022-12-31 21:28:45 +08:00
lencx
fd62c2a8c4 readme 2022-12-31 21:28:45 +08:00
lencx
85fbd4a104 refactor: global shortcut 2022-12-31 21:28:45 +08:00
lencx
5ebda4105d refactor: prompts sync 2022-12-31 21:28:45 +08:00
lencx
4c7907c106 chore: conf path 2022-12-31 21:28:45 +08:00
lencx
beddf76198 fix: customize global shortcuts (#108) 2022-12-31 21:28:45 +08:00
lencx
d450d35935 Merge pull request #116 from VinchanGit/main 2022-12-31 21:17:41 +08:00
Vinchan
11492fef24 feature:readme zh-cn 2022-12-31 21:10:12 +08:00
lencx
74c0d07a76 Merge pull request #115 from lencx/dev 2022-12-31 20:27:49 +08:00
lencx
dc769235a7 v0.7.4 2022-12-31 20:11:21 +08:00
lencx
52cc029b01 readme 2022-12-31 20:11:15 +08:00
lencx
39dc007513 readme 2022-12-31 20:08:11 +08:00
lencx
ba1fe9a603 refactor: global shortcut 2022-12-31 20:07:09 +08:00
lencx
e1f8030009 refactor: prompts sync 2022-12-31 18:01:31 +08:00
lencx
9a392a71f6 chore: conf path 2022-12-31 12:43:47 +08:00
lencx
dc0c78fee2 fix: customize global shortcuts (#108) 2022-12-30 23:46:30 +08:00
lencx
b3bd54ce81 Merge pull request #101 from lencx/dev 2022-12-29 21:31:43 +08:00
lencx
ba21fa85d2 v0.7.3 2022-12-29 21:06:40 +08:00
lencx
2ab35bb925 readme 2022-12-29 21:06:29 +08:00
lencx
9cacad0120 chore: optim style 2022-12-29 21:05:21 +08:00
lencx
f1fa859961 chore: optim 2022-12-29 19:29:33 +08:00
lencx
9a9fb24de8 chore: icon & global shortcuts 2022-12-29 01:42:00 +08:00
lencx
3424666ec9 chore: tauri conf 2022-12-29 01:42:00 +08:00
lencx
416bf7064c Merge pull request #97 from Sturlen/main 2022-12-28 18:22:32 +08:00
Sturlen
f5cf3acd3a fix: matching action button colors 2022-12-28 10:47:30 +01:00
lencx
975ffd2d84 Merge pull request #92 from lencx/dev 2022-12-28 03:49:04 +08:00
lencx
145264719f chore: action 2022-12-28 03:35:08 +08:00
lencx
a929376cb2 v0.7.2 2022-12-28 03:34:05 +08:00
lencx
478049e23e fix: windows can't start 2022-12-28 03:33:40 +08:00
lencx
631dee95a7 fix: windows can't start 2022-12-28 03:07:17 +08:00
lencx
c4ff0b4107 Merge pull request #91 from lencx/dev 2022-12-28 01:44:22 +08:00
lencx
bcd350584e v0.7.1 2022-12-28 01:25:49 +08:00
lencx
050045f644 chore: action 2022-12-28 01:25:28 +08:00
lencx
7e9440b45e readme 2022-12-28 01:13:22 +08:00
lencx
cd9c0ac742 fix: windows can't start (#85) 2022-12-28 01:13:00 +08:00
lencx
2d018c4967 fix: tray icon (#87) 2022-12-28 01:12:03 +08:00
lencx
f4d3cc6c8e Merge branch 'main' of github.com:lencx/ChatGPT 2022-12-27 21:33:29 +08:00
xueweiwujxw
cd6cece45e fix(src-tauri/src/app/menu.rs): warning on linux
add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu
2022-12-27 21:29:46 +08:00
lencx
54b5b63f0e v0.7.0 2022-12-27 21:29:11 +08:00
lencx
680f1b01ad readme 2022-12-27 21:29:11 +08:00
lencx
078b0296f5 chore: cmd 2022-12-27 21:29:11 +08:00
lencx
c956758a4a readme 2022-12-27 21:29:11 +08:00
lencx
477120ef3b feat: use the keyboard to select the slash command 2022-12-27 21:29:11 +08:00
lencx
0ee95630ef Merge pull request #86 from xueweiwujxw/menu-warn 2022-12-27 21:15:28 +08:00
xueweiwujxw
fb0319a977 🐞 fix(src-tauri/src/app/menu.rs): fix warning on linux
add `#[cfg(target_os = "macos")]` when declare titlebar and titlebar_menu
2022-12-27 20:50:23 +08:00
lencx
ea1a78abf5 Merge pull request #84 from lencx/dev 2022-12-27 15:31:56 +08:00
lencx
3428e11b85 v0.7.0 2022-12-27 15:15:47 +08:00
lencx
0e0771d0ec readme 2022-12-27 15:14:49 +08:00
lencx
d78e2ad0b3 chore: cmd 2022-12-27 15:14:24 +08:00
lencx
ae31da0b29 readme 2022-12-27 14:54:28 +08:00
lencx
39febe759e feat: use the keyboard to select the slash command 2022-12-27 14:54:28 +08:00
lencx
06ee907199 Merge pull request #81 from beilunyang/patch-1 2022-12-27 12:17:58 +08:00
BeilunYang
f8c1ca5c56 fix(build): mac m1 chip copy/paste 2022-12-27 11:34:16 +08:00
lencx
6da58269bd Merge pull request #79 from weltonrodrigo/patch-1 2022-12-26 10:19:28 +08:00
Welton Rodrigo Torres Nascimento
4bf6c61bee bump homebrew cask to 0.6.10 2022-12-25 20:29:47 -03:00
lencx
a07c85a9cc Merge pull request #78 from lencx/dev 2022-12-25 09:22:15 +08:00
lencx
95a9f12b68 v0.6.10 2022-12-25 08:54:11 +08:00
lencx
252b0f3e15 fix: windows sync 2022-12-25 08:53:58 +08:00
lencx
ed268b32b3 Merge pull request #77 from lencx/dev 2022-12-25 02:27:58 +08:00
lencx
e2319f2fda v0.6.9 2022-12-25 02:01:51 +08:00
lencx
9ec69631f3 readme 2022-12-25 02:01:47 +08:00
lencx
83437ffea7 Merge pull request #73 from lencx/dev 2022-12-24 22:56:30 +08:00
lencx
be9846dc22 readme 2022-12-24 22:31:03 +08:00
lencx
f071e0d6bc v0.6.8 2022-12-24 22:30:28 +08:00
lencx
62a176d20c Merge pull request #72 from lencx/dev 2022-12-24 21:51:21 +08:00
lencx
2f8ff36638 v0.6.7 2022-12-24 21:37:35 +08:00
lencx
fe236e3c66 Merge pull request #71 from lencx/dev 2022-12-24 21:20:41 +08:00
lencx
38e319a215 v0.6.6 2022-12-24 21:06:16 +08:00
lencx
05057d06ad fix: unable to synchronize 2022-12-24 21:05:51 +08:00
lencx
0b0b832130 Merge pull request #70 from lencx/dev 2022-12-24 20:26:23 +08:00
lencx
413d3354c7 v0.6.5 2022-12-24 20:07:07 +08:00
lencx
f1c7fff800 readme 2022-12-24 20:06:56 +08:00
lencx
6fe90dea5b fix: path not allowed on the configured scope (#64) 2022-12-24 20:04:24 +08:00
lencx
25ab2b0368 chore: optim 2022-12-24 20:04:14 +08:00
lencx
94973b1420 Merge pull request #69 from JacobLinCool/patch-1 2022-12-24 01:10:52 +08:00
JacobLinCool
0930cd782a docs: fix cask name in brewfile section 2022-12-24 00:28:15 +08:00
lencx
0733bba4bf Merge pull request #67 from lencx/fix 2022-12-23 23:07:54 +08:00
lencx
bf623365da v0.6.4 2022-12-23 22:51:39 +08:00
lencx
dc88ea9182 fix: path not allowed on the configured scope (#64) 2022-12-23 22:51:23 +08:00
lencx
f411541a76 Merge pull request #66 from lencx/fix 2022-12-23 22:41:09 +08:00
lencx
ca3badc783 v0.6.3 2022-12-23 22:27:46 +08:00
lencx
d7328f576a v0.6.3 2022-12-23 22:27:37 +08:00
lencx
eaf72e2b73 fix: action 2022-12-23 22:23:35 +08:00
lencx
bd2c4fff5c fix: action 2022-12-23 22:10:27 +08:00
lencx
3ca66cf309 v0.6.3 2022-12-23 21:56:45 +08:00
lencx
44c91bc85c fix: path not allowed on the configured scope 2022-12-23 21:56:16 +08:00
lencx
a75ae5e615 Merge pull request #65 from lencx/fix 2022-12-23 21:46:01 +08:00
lencx
7b8f29534b Merge pull request #63 from lencx/fix 2022-12-23 20:21:34 +08:00
58 changed files with 1697 additions and 516 deletions

.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file

@@ -0,0 +1,43 @@
name: "🕷️ Bug report"
description: "report bugs"
title: "[Bug]"
labels:
- "bug"
body:
- type: markdown
attributes:
value: "Please make sure to [search for existing issues](https://github.com/lencx/ChatGPT/issues) before filing a new one!"
- type: markdown
attributes:
value: |
## Bug report
Please fill in the following information to help us reproduce the bug:
- type: input
id: version
attributes:
label: Version
description: "Please specify the version of ChatGPT you are using, a newer version may have fixed the bug you encountered.Check the [UPDATE_LOG](https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md) for more information."
placeholder: "e.g. v0.1.0"
validations:
required: true
- type: textarea
id: bug
attributes:
label: Bug description
description: |
Please describe the bug here. If possible, provide a minimal example to reproduce it. The content of `~/.chatgpt/chatgpt.log` may be helpful if you encounter a crash.
validations:
required: true
- type: input
id: OS
attributes:
label: OS
description: "Please specify the OS you are using."
placeholder: "e.g. Ubuntu 22.04"
validations:
required: true
- type: textarea
id: environment
attributes:
label: Environment
description: "If you think your environment may be related to the problem, please describe it here."


@@ -0,0 +1,37 @@
name: "❌ Build error report"
description: "report errors when building by yourself"
title: "[Build Error]"
labels:
- "build error"
body:
- type: markdown
attributes:
value: "Please make sure to [search for existing issues](https://github.com/lencx/ChatGPT/issues) before filing a new one!"
- type: markdown
attributes:
value: "Please make sure to build from the source code with the latest version of ChatGPT."
- type: markdown
attributes:
value: |
## Build error report
Please fill in the following information to help us reproduce the bug:
- type: textarea
id: error
attributes:
label: Error message
description: "Please paste the error message here."
validations:
required: true
- type: input
id: OS
attributes:
label: OS
description: "Please specify the OS you are using."
placeholder: "e.g. Ubuntu 22.04"
validations:
required: true
- type: textarea
id: environment
attributes:
label: Environment
description: "If you think your environment may be related to the problem, please describe it here."

.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1 @@
blank_issues_enabled: false


@@ -0,0 +1,19 @@
name: "📚 Documentation Issue"
description: "report documentation issues, typos welcome!"
title: "[Doc]"
labels:
- "documentation"
body:
- type: markdown
attributes:
value: "Please make sure to [search for existing issues](https://github.com/lencx/ChatGPT/issues) before creating a new one."
- type: textarea
id: doc-description
attributes:
label: "Provide a description of requested docs changes"
description: "Briefly describe the requested docs changes."
validations:
required: true
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -0,0 +1,34 @@
name: "⭐ Feature or enhancement request"
description: "suggest new features or enhancements"
title: "[Feature]"
labels:
- "enhancement"
body:
- type: markdown
attributes:
value: "Please make sure to [search for existing issues](https://github.com/lencx/ChatGPT/issues) before creating a new one."
- type: textarea
id: feature-description
attributes:
label: "Feature description"
description: "Describe the feature or enhancements you'd like to see."
validations:
required: true
- type: textarea
id: motivation
attributes:
label: "Motivation"
description: "Describe the motivation for this feature or enhancement."
- type: textarea
id: alternatives
attributes:
label: "Alternatives"
description: "Describe any alternatives you've considered."
- type: textarea
id: additional-context
attributes:
label: "Additional context"
description: "Add any other context or screenshots about the feature request here."
- type: markdown
attributes:
value: Please limit one request per issue.

.github/ISSUE_TEMPLATE/security.yml vendored Normal file

@@ -0,0 +1,34 @@
name: "⚠️ Security&Privacy issue"
description: "Report security or privacy issues"
title: "[Security]"
labels:
- "security"
body:
- type: markdown
attributes:
value: "Please make sure to [search for existing issues](https://github.com/lencx/ChatGPT/issues) before creating a new one."
- type: textarea
id: security-description
attributes:
label: "Description"
description: "Describe the security or privacy issue."
validations:
required: true
- type: textarea
id: motivation
attributes:
label: "Motivation"
description: "Describe the motivation for this security or privacy issue."
- type: textarea
id: alternatives
attributes:
label: "Alternatives"
description: "Describe any alternatives you've considered."
- type: textarea
id: additional-context
attributes:
label: "Additional context"
description: "Add any other context or screenshots about the security or privacy issue here."
- type: markdown
attributes:
value: Please limit one request per issue.

.github/workflows/release.yml

@@ -2,13 +2,12 @@ name: Release CI
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
jobs:
create-release:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
outputs:
RELEASE_UPLOAD_ID: ${{ steps.create_release.outputs.id }}
@@ -19,7 +18,7 @@ jobs:
shell: bash
run: |
echo "using version tag ${GITHUB_REF:10}"
echo ::set-output name=version::"${GITHUB_REF:10}"
echo "version=${GITHUB_REF:10}" >> $GITHUB_ENV
- name: Create Release
id: create_release
@@ -27,8 +26,8 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: '${{ steps.get_version.outputs.VERSION }}'
release_name: 'ChatGPT ${{ steps.get_version.outputs.VERSION }}'
tag_name: '${{ env.version }}'
release_name: 'ChatGPT ${{ env.version }}'
body: 'See the assets to download this version and install.'
build-tauri:
@@ -36,47 +35,35 @@ jobs:
strategy:
fail-fast: false
matrix:
platform: [macos-latest, ubuntu-latest, windows-latest]
platform: [macos-latest, ubuntu-20.04, windows-latest]
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- name: Setup node
uses: actions/setup-node@v1
- uses: actions/checkout@v3
- name: setup node
uses: actions/setup-node@v3
with:
node-version: 18
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
# Rust cache
- uses: Swatinem/rust-cache@v1
node-version: 16
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
- name: install dependencies (ubuntu only)
if: matrix.platform == 'ubuntu-latest'
if: matrix.platform == 'ubuntu-20.04'
run: |
sudo apt-get update
sudo apt-get install -y libgtk-3-dev webkit2gtk-4.0 libappindicator3-dev librsvg2-dev patchelf
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
- name: Yarn cache
uses: actions/cache@v2
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
- name: Install app dependencies and build it
run: yarn && yarn build:fe
# - name: Rewrite tauri.conf.json
# run: yarn fix:conf
- name: fix tray icon
if: matrix.platform != 'macos-latest'
run: |
yarn fix:tray
- uses: tauri-apps/tauri-action@v0.3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -87,7 +74,7 @@ jobs:
releaseId: ${{ needs.create-release.outputs.RELEASE_UPLOAD_ID }}
updater:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
needs: [create-release, build-tauri]
steps:
@@ -102,3 +89,14 @@ jobs:
# 📝: Edit the deployment directory
publish_dir: ./updater
force_orphan: true
publish-winget:
# Action can only be run on windows
runs-on: windows-latest
needs: [create-release, build-tauri]
steps:
- uses: vedantmgoyal2009/winget-releaser@v1
with:
identifier: lencx.ChatGPT
token: ${{ secrets.WINGET_TOKEN }}
version: ${{ env.version }}

AWESOME.md

@@ -1,6 +1,6 @@
# Awesome ChatGPT
- [Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts) - This repo includes ChatGPT promt curation to use ChatGPT better.
- [Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts) - This repo includes ChatGPT prompt curation to use ChatGPT better.
- [Awesome ChatGPT](https://github.com/humanloop/awesome-chatgpt) - Curated list of awesome tools, demos, docs for ChatGPT and GPT-3
## Extension

README-ZH_CN.md

@@ -1,66 +1,81 @@
<p align="center">
<img width="180" src="./public/logo.png" alt="ChatGPT">
<h1 align="center">ChatGPT</h1>
<p align="center">ChatGPT 桌面应用Mac, Windows and Linux</p>
</p>
> ChatGPT 桌面应用
[![English badge](https://img.shields.io/badge/%E8%8B%B1%E6%96%87-English-blue)](./README.md)
[![简体中文 badge](https://img.shields.io/badge/%E7%AE%80%E4%BD%93%E4%B8%AD%E6%96%87-Simplified%20Chinese-blue)](./README-ZH_CN.md)
[![简体中文 badge](https://img.shields.io/badge/%E7%AE%80%E4%BD%93%E4%B8%AD%E6%96%87-Simplified%20Chinese-blue)](./README-ZH_CN.md)\
![License](https://img.shields.io/badge/License-Apache%202-green.svg)
[![ChatGPT downloads](https://img.shields.io/github/downloads/lencx/ChatGPT/total.svg?style=flat-square)](https://github.com/lencx/ChatGPT/releases)
[![chat](https://img.shields.io/badge/chat-discord-blue?style=flat&logo=discord)](https://discord.gg/aPhCRf4zZr)
[![lencx](https://img.shields.io/twitter/follow/lencx_.svg?style=social)](https://twitter.com/lencx_)
[Awesome ChatGPT](./AWESOME.md)
<a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 40px !important;width: 145px !important;" ></a>
## 📦 下载
## 📦 安装
[📝 更新日志](./UPDATE_LOG.md)
- [📝 更新日志](./UPDATE_LOG.md)
- [🕒 历史版本...](https://github.com/lencx/ChatGPT/releases)
<!-- download start -->
**最新版:**
### Windows
- `Mac`: [ChatGPT_0.6.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/ChatGPT_0.6.2_x64.dmg)
- `Linux`: [chat-gpt_0.6.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/chat-gpt_0.6.2_amd64.deb)
- `Windows`: [ChatGPT_0.6.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/ChatGPT_0.6.2_x64_en-US.msi)
- [ChatGPT_0.8.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT_0.8.1_x64_en-US.msi):
- 使用 [winget](https://winstall.app/apps/lencx.ChatGPT):
```bash
# install the latest version
winget install --id=lencx.ChatGPT -e
[其他版本...](https://github.com/lencx/ChatGPT/releases)
# install the specified version
winget install --id=lencx.ChatGPT -e --version 0.8.1
```
**注意:如果安装路径和应用名称相同,会导致冲突 ([#142](https://github.com/lencx/ChatGPT/issues/142#issuecomment-0.8.1))**
### Mac
- [ChatGPT_0.8.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT_0.8.1_x64.dmg)
- [ChatGPT.app.tar.gz](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT.app.tar.gz)
- Homebrew \
_[Homebrew 快捷安装](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine
```
如果你坚持使用 _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_ ,则需要添加以下配置:
```rb
repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git"
cask "chatgpt", args: { "no-quarantine": true }
```
### Linux
- [chat-gpt_0.8.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/chat-gpt_0.8.1_amd64.deb)
- [chat-gpt_0.8.1_amd64.AppImage](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/chat-gpt_0.8.1_amd64.AppImage): **工作可靠,`.deb` 运行失败时可以尝试它**
- 使用 [AUR](https://aur.archlinux.org/packages/chatgpt-desktop-bin):
```bash
yay -S chatgpt-desktop-bin
```
<!-- download end -->
### brew 安装
Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine
```
Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this:
```rb
repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true }
```
## 📢 公告
这是一个令人兴奋的重大更新。像 `Telegram 机器人指令` 那样工作,帮助你快速填充自定模型,来让 ChatGPT 按照你想要的方式去工作。这个项目倾注了我大量业余时间,如果它对你有所帮助,宣传转发,或者 star 都是对我的巨大鼓励。我希望我可以持续更新下去,加入更多有趣的功能。
### 如何使用指令?
你可以从 [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) 来寻找有趣的功能来导入到应用。
你可以从 [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) 来寻找有趣的功能来导入到应用。也可以使用 `Sync Prompts`,来一键同步所有,如果你不想让某些提示出现在你的斜杠命令,你可以禁用它们。
![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png)
![chatgpt menu](./assets/chatgpt-menu.png)
![chatgpt sync prompts](./assets/chatgpt-sync-prompts.png)
数据导入完成后,可以重新启动应用来使配置生效(`Menu -> Preferences -> Restart ChatGPT`)。
<!-- 数据导入完成后,可以重新启动应用来使配置生效(`Menu -> Preferences -> Restart ChatGPT`)。 -->
在 ChatGPT 文本输入区域,键入 `/` 开头的字符,则会弹出指令提示,按下空格键,它会默认将命令关联的文本填充到输入区域(注意:如果包含多个指令提示,它只会选择第一个作为填充,你可以持续输入,直到第一个提示命令为你想要时,再按下空格键。或者使用鼠标来点击多条指令中的某一个)。填充完成后,你只需要按下回车键即可。斜杠命令下,使用 TAB 键修改 `{q}` 标签内容(仅支持单个修改 [#54](https://github.com/lencx/ChatGPT/issues/54))。
在 ChatGPT 文本输入区域,键入 `/` 开头的字符,则会弹出指令提示,按下空格键,它会默认将命令关联的文本填充到输入区域(注意:如果包含多个指令提示,它只会选择第一个作为填充,你可以持续输入,直到第一个提示命令为你想要时,再按下空格键。或者使用鼠标来点击多条指令中的某一个)。填充完成后,你只需要按下回车键即可。斜杠命令下,使用 TAB 键修改 `{q}` 标签内容(仅支持单个修改 [#54](https://github.com/lencx/ChatGPT/issues/54))。使用键盘 `⇧` 和 `⇩`(上下键)来选择斜杠指令。
![chatgpt](assets/chatgpt.gif)
![chatgpt-cmd](assets/chatgpt-cmd.gif)
@@ -74,11 +89,13 @@ cask "popcorn-time", args: { "no-quarantine": true }
- 系统托盘悬浮窗
- 应用菜单功能强大
- 支持斜杠命令及其配置(可手动配置或从文件同步 [#55](https://github.com/lencx/ChatGPT/issues/55))
- 自定义全局快捷键 ([#108](https://github.com/lencx/ChatGPT/issues/108))
- 划词搜索 ([#122](https://github.com/lencx/ChatGPT/issues/122) 鼠标选中文本,不超过 400 个字符):应用使用 Tauri 构建,因其安全限制,会导致部分操作按钮无效,建议前往浏览器操作。
### 菜单项
### #️⃣ 菜单项
- **Preferences (喜好)**
- `Theme` - `Light`, `Dark` (仅支持 macOS 和 Windows)
- `Theme` - `Light`, `Dark`, `System` (仅支持 macOS 和 Windows)
- `Stay On Top`: 窗口置顶
- `Titlebar`: 是否显示 `Titlebar`,仅 macOS 支持
- `Inject Script`: 用于修改网站的用户自定义脚本
@@ -99,7 +116,7 @@ cask "popcorn-time", args: { "no-quarantine": true }
- `Report Bug`: 报告 BUG 或反馈建议
- `Toggle Developer Tools`: 网站调试工具,调试页面或脚本可能需要
## 应用配置
## ⚙️ 应用配置
| 平台 | 路径 |
| ------- | ------------------------- |
@@ -121,11 +138,11 @@ cask "popcorn-time", args: { "no-quarantine": true }
- `4f695d3cfbf8491e9b1f3fab6d85715c.json` - 随机生成的文件名,缓存 `sync_custom` 数据
- `bd1b96f15a1644f7bd647cc53073ff8f.json` - 随机生成的文件名,缓存 `sync_custom` 数据
### Sync Custom
### 客户端信息同步
目前同步自定文件仅支持 json 和 csv,且需要满足以下格式,否则会导致应用异常
> JSON 格式
`JSON 格式`
```json
[
@@ -142,7 +159,7 @@ cask "popcorn-time", args: { "no-quarantine": true }
]
```
> CSV 格式
`CSV 格式`
```csv
"cmd","act","prompt"
@@ -152,13 +169,9 @@ cask "popcorn-time", args: { "no-quarantine": true }
## 👀 预览
<img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/control-center.png" alt="control center">
<img width="320" src="./assets/export.png" alt="export"> <img width="320" src="./assets/tray.png" alt="tray">
<img width="320" src="./assets/tray-login.png" alt="tray login"> <img width="320" src="./assets/auto-update.png" alt="auto update">
---
<a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
<img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/chatgpt-control-center-general.png" alt="control center">
<img width="320" src="./assets/chatgpt-export.png" alt="export"> <img width="320" src="./assets/chatgpt-dalle2-tray.png" alt="dalle2 tray">
<img width="320" src="./assets/auto-update.png" alt="auto update">
## ❓ 常见问题
@@ -174,12 +187,14 @@ cask "popcorn-time", args: { "no-quarantine": true }
它是安全的,仅仅只是对 [OpenAI ChatGPT](https://chat.openai.com) 网站的包装,注入了一些额外功能(均在本地,未发起网络请求),如果存疑,可以检查源代码。
### Developer cannot be verified?
### 开发者未验证?
Mac 上无法安装,提示开发者未验证,具体可以查看下面给出的解决方案(它是开源的,很安全)。
- [Open a Mac app from an unidentified developer](https://support.apple.com/en-sg/guide/mac-help/mh40616/mac)
---
### 我想自己构建它?
#### 预安装
@@ -216,7 +231,7 @@ yarn build
---
[![Star History Chart](https://api.star-history.com/svg?repos=lencx/chatgpt&type=Date)](https://star-history.com/#lencx/chatgpt&Date)
[![Star History Chart](https://api.star-history.com/svg?repos=lencx/chatgpt&type=Timeline)](https://star-history.com/#lencx/chatgpt&Timeline)
## 中国用户

README.md

@@ -1,68 +1,84 @@
<p align="center">
<img width="180" src="./public/logo.png" alt="ChatGPT">
<h1 align="center">ChatGPT</h1>
<p align="center">ChatGPT Desktop Application (Mac, Windows and Linux)</p>
</p>
> ChatGPT Desktop Application
[![English badge](https://img.shields.io/badge/%E8%8B%B1%E6%96%87-English-blue)](./README.md)
[![简体中文 badge](https://img.shields.io/badge/%E7%AE%80%E4%BD%93%E4%B8%AD%E6%96%87-Simplified%20Chinese-blue)](./README-ZH_CN.md)
[![简体中文 badge](https://img.shields.io/badge/%E7%AE%80%E4%BD%93%E4%B8%AD%E6%96%87-Simplified%20Chinese-blue)](./README-ZH_CN.md)\
![License](https://img.shields.io/badge/License-Apache%202-green.svg)
[![ChatGPT downloads](https://img.shields.io/github/downloads/lencx/ChatGPT/total.svg?style=flat-square)](https://github.com/lencx/ChatGPT/releases)
[![chat](https://img.shields.io/badge/chat-discord-blue?style=flat&logo=discord)](https://discord.gg/aPhCRf4zZr)
[![lencx](https://img.shields.io/twitter/follow/lencx_.svg?style=social)](https://twitter.com/lencx_)
<!-- [![中文版 badge](https://img.shields.io/badge/%E4%B8%AD%E6%96%87-Traditional%20Chinese-blue)](./README-ZH.md) -->
[Awesome ChatGPT](./AWESOME.md)
<a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 40px !important;width: 145px !important;" ></a>
## 📦 Downloads
## 📦 Install
[📝 Update Log](./UPDATE_LOG.md)
- [📝 Update Log](./UPDATE_LOG.md)
- [🕒 History versions...](https://github.com/lencx/ChatGPT/releases)
<!-- download start -->
**Latest:**
### Windows
- `Mac`: [ChatGPT_0.6.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/ChatGPT_0.6.2_x64.dmg)
- `Linux`: [chat-gpt_0.6.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/chat-gpt_0.6.2_amd64.deb)
- `Windows`: [ChatGPT_0.6.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.2/ChatGPT_0.6.2_x64_en-US.msi)
- [ChatGPT_0.8.1_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT_0.8.1_x64_en-US.msi): Direct download installer
- Use [winget](https://winstall.app/apps/lencx.ChatGPT):
```bash
# install the latest version
winget install --id=lencx.ChatGPT -e
[Other version...](https://github.com/lencx/ChatGPT/releases)
# install the specified version
winget install --id=lencx.ChatGPT -e --version 0.8.1
```
**Note: If the installation path and application name are the same, it will lead to conflict ([#142](https://github.com/lencx/ChatGPT/issues/142#issuecomment-0.8.1))**
### Mac
- [ChatGPT_0.8.1_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT_0.8.1_x64.dmg): Direct download installer
- [ChatGPT.app.tar.gz](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/ChatGPT.app.tar.gz): Download the `.app` installer
- Homebrew \
Or you can install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine
```
Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this:
```rb
repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git"
cask "chatgpt", args: { "no-quarantine": true }
```
### Linux
- [chat-gpt_0.8.1_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/chat-gpt_0.8.1_amd64.deb): Download the `.deb` installer; smaller file size, but poorer compatibility
- [chat-gpt_0.8.1_amd64.AppImage](https://github.com/lencx/ChatGPT/releases/download/v0.8.1/chat-gpt_0.8.1_amd64.AppImage): Works reliably; try it if the `.deb` fails to run
- Available on [AUR](https://aur.archlinux.org/packages/chatgpt-desktop-bin) with the package name `chatgpt-desktop-bin`, and you can use your favourite AUR package manager to install it.
<!-- download end -->
### Install
Easily install with _[Homebrew](https://brew.sh) ([Cask](https://docs.brew.sh/Cask-Cookbook)):_
```sh
brew tap lencx/chatgpt https://github.com/lencx/ChatGPT.git
brew install --cask chatgpt --no-quarantine
```
Also, if you keep a _[Brewfile](https://github.com/Homebrew/homebrew-bundle#usage)_, you can add something like this:
```rb
repo = "lencx/chatgpt"
tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true }
```
## 📢 Announcement
### ChatGPT Prompts!
This is a major and exciting update. It works like a `Telegram bot command` and helps you quickly populate custom models to make chatgpt work the way you want it to. This project has taken a lot of my spare time, so if it helps you, please help spread the word or star it would be a great encouragement to me. I hope I can keep updating it and adding more interesting features.
### How does it work?
You can look at [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) to find interesting features to import into the app.
You can look at **[awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts)** to find interesting features to import into the app. You can also use `Sync Prompts` to sync all in one click, and if you don't want certain prompts to appear in your slash commands, you can disable them.
![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png)
![chatgpt menu](./assets/chatgpt-menu.png)
![chatgpt sync prompts](./assets/chatgpt-sync-prompts.png)
After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
<!-- After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`). -->
In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, press the spacebar, and it will fill the input area with the text associated with the command by default (note: if it contains multiple command prompts, it will only select the first one as the fill, you can keep typing until the first prompted command is the one you want, then press the spacebar. Or use the mouse to click on one of the multiple commands). When the fill is complete, you simply press the Enter key. Under the slash command, use the tab key to modify the contents of the `{q}` tag (only single changes are supported [#54](https://github.com/lencx/ChatGPT/issues/54)).
- In the chatgpt text input area, type a character starting with `/` to bring up the command prompt, then press the spacebar; it fills the input area with the text associated with the command by default (note: if multiple command prompts match, only the first one is used as the fill, so you can keep typing until the first prompted command is the one you want and then press the spacebar, or use the mouse to click on one of the multiple commands).
- When the fill is complete, simply press the Enter key.
- Under the slash command, use the Tab key to modify the contents of the `{q}` tag (only single changes are supported [#54](https://github.com/lencx/ChatGPT/issues/54)). Use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command.
![chatgpt](assets/chatgpt.gif)
![chatgpt-cmd](assets/chatgpt-cmd.gif)
@@ -76,11 +92,13 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
- System tray hover window
- Powerful menu items
- Support for slash commands and their configuration (can be configured manually or synchronized from a file [#55](https://github.com/lencx/ChatGPT/issues/55))
- Customize global shortcuts ([#108](https://github.com/lencx/ChatGPT/issues/108))
- Pop-up Search ([#122](https://github.com/lencx/ChatGPT/issues/122) mouse selected content, no more than 400 characters): The application is built using Tauri, and due to its security restrictions, some of the action buttons will not work, so we recommend going to your browser.
### MenuItem
## #️⃣ MenuItem
- **Preferences**
- `Theme` - `Light`, `Dark` (Only macOS and Windows are supported).
- `Theme` - `Light`, `Dark`, `System` (Only macOS and Windows are supported).
- `Stay On Top`: The window is stay on top of other windows.
- `Titlebar`: Whether to display the titlebar, supported by macOS only.
- `Hide Dock Icon` ([#35](https://github.com/lencx/ChatGPT/issues/35)): Hide application icons from the Dock(support macOS only).
@@ -101,7 +119,7 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
- `Report Bug`: Report a bug or give feedback.
- `Toggle Developer Tools`: Developer debugging tools.
## Application Configuration
## ⚙️ Application Configuration
| Platform | Path |
| -------- | ------------------------- |
@@ -127,7 +145,7 @@ In the chatgpt text input area, type a character starting with `/` to bring up t
Currently, only json and csv are supported for synchronizing custom files, and the following formats must be followed, otherwise the application will misbehave
> JSON format:
`JSON format:`
```json
[
@@ -144,7 +162,7 @@ Currently, only json and csv are supported for synchronizing custom files, and t
]
```
> CSV format
`CSV format`
```csv
"cmd","act","prompt"
@@ -152,21 +170,18 @@ Currently, only json and csv are supported for synchronizing custom files, and t
"b","bb","bbb bbb bbb"
```
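Since the diff truncates the JSON body above, here is a minimal illustrative record (not taken verbatim from the repository) that reuses the field names and sample values from the CSV snippet, matching the `cmd`/`act`/`prompt` structure the app parses:

```json
[
  {
    "cmd": "b",
    "act": "bb",
    "prompt": "bbb bbb bbb"
  }
]
```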
## TODO
## 📌 TODO
- Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20))
- `Control Center` - Feature Enhancements
<!-- - Web access capability ([#20](https://github.com/lencx/ChatGPT/issues/20)) -->
- `Control Center` enhancement
- `Pop-up Search` enhancement
- ...
## 👀 Preview
<img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/control-center.png" alt="control center">
<img width="320" src="./assets/export.png" alt="export"> <img width="320" src="./assets/tray.png" alt="tray">
<img width="320" src="./assets/tray-login.png" alt="tray login"> <img width="320" src="./assets/auto-update.png" alt="auto update">
---
<a href="https://www.buymeacoffee.com/lencx" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-blue.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
<img width="320" src="./assets/install.png" alt="install"> <img width="320" src="./assets/chatgpt-popup-search.png" alt="popup search">
<img width="320" src="./assets/chatgpt-control-center-general.png" alt="control center"> <img width="320" src="./assets/chatgpt-export.png" alt="export">
<img width="320" src="./assets/chatgpt-dalle2-tray.png" alt="dalle2 tray"> <img width="320" src="./assets/auto-update.png" alt="auto update">
## ❓FAQ
@@ -186,7 +201,9 @@ It's safe, just a wrapper for [OpenAI ChatGPT](https://chat.openai.com) website,
- [Open a Mac app from an unidentified developer](https://support.apple.com/en-sg/guide/mac-help/mh40616/mac)
### How do i build it?
---
### How do I build it?
#### PreInstall
@@ -222,7 +239,7 @@ yarn build
---
[![Star History Chart](https://api.star-history.com/svg?repos=lencx/chatgpt&type=Date)](https://star-history.com/#lencx/chatgpt&Date)
[![Star History Chart](https://api.star-history.com/svg?repos=lencx/chatgpt&type=Timeline)](https://star-history.com/#lencx/chatgpt&Timeline)
## 中国用户

UPDATE_LOG.md

@@ -1,6 +1,61 @@
# UPDATE LOG
## v0.6.2
## v0.8.1
fix:
- export button keeps blinking
- export button in the old chat does not work
- disable export sharing links because it is a security risk
## v0.8.0
feat:
- theme enhancement (Light, Dark, System)
- automatic updates support `silent` settings
- pop-up search: select ChatGPT content with the mouse and the `DALL·E 2` button appears; click it to jump (note: because the search content filled by the script cannot trigger the event directly, you need to enter a space in the input box to make the button clickable).
fix:
- close the main window and hide it in the tray (windows systems)
## v0.7.4
fix:
- trying to resolve linux errors: `error while loading shared libraries`
- customize global shortcuts (`Menu -> Preferences -> Control Center -> General -> Global Shortcut`)
## v0.7.3
chore:
- optimize slash command style
- optimize tray menu icon and button icons
- global shortcuts to the chatgpt app (mac: `Command + Shift + O`, windows: `Ctrl + Shift + O`)
## v0.7.2
fix: some windows systems cannot start the application
## v0.7.1
fix:
- some windows systems cannot start the application
- add about menu on windows and linux (shows version information)
- the tray icon is indistinguishable from the background in dark mode on windows and linux
## v0.7.0
fix:
- mac m1 copy/paste does not work on some system versions
- optimize the save chat log button to a small icon; the tray window no longer provides a save chat log button (the button caused the input area to become larger and the content area to become smaller)
feat:
- use the keyboard `⇧` (arrow up) and `⇩` (arrow down) keys to select the slash command
<!-- - global shortcuts to the chatgpt app (mac: command+shift+o, windows: ctrl+shift+o) -->
## v0.6.10
fix: sync failure on windows
## v0.6.4
fix: path not allowed on the configured scope

Binary image assets changed (diff not shown): several screenshots under `assets/` were added or removed; newly added files include `assets/chatgpt-export.png` and `assets/chatgpt-menu.png`.

Casks/chatgpt.rb

@@ -1,6 +1,6 @@
cask "chatgpt" do
version "0.1.7"
sha256 "1320b30a67e2506f9b45ffd2a48243d6141171c231dd698994ae5156a637eb3f"
version "0.6.10"
sha256 "e85062565f826d32219c53b184d6df9c89441d4231cdfff775c2de8c50ac9906"
url "https://github.com/lencx/ChatGPT/releases/download/v#{version}/ChatGPT_#{version}_x64.dmg"
name "ChatGPT"

package.json

@@ -8,6 +8,9 @@
"build": "yarn tauri build",
"updater": "tr updater",
"release": "tr release --git",
"fix:conf": "tr override --json.tauri_updater_active=false",
"fix:tray": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon-light.png\" --json.tauri_systemTray_iconAsTemplate=false",
"fix:tray:mac": "tr override --json.tauri_systemTray_iconPath=\"icons/tray-icon.png\" --json.tauri_systemTray_iconAsTemplate=true",
"download": "node ./scripts/download.js",
"tr": "tr",
"tauri": "tauri"

src-tauri/Cargo.toml

@@ -17,19 +17,24 @@ tauri-build = {version = "1.2.1", features = [] }
anyhow = "1.0.66"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] }
tauri = { version = "1.2.3", features = ["api-all", "devtools", "global-shortcut", "system-tray", "updater"] }
tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] }
log = "0.4.17"
csv = "1.1.6"
thiserror = "1.0.38"
walkdir = "2.3.2"
# tokio = { version = "1.23.0", features = ["macros"] }
# reqwest = "0.11.13"
regex = "1.7.0"
tokio = { version = "1.23.0", features = ["macros"] }
reqwest = "0.11.13"
wry = "0.23.4"
dark-light = "1.0.0"
[dependencies.tauri-plugin-log]
git = "https://github.com/tauri-apps/tauri-plugin-log"
git = "https://github.com/lencx/tauri-plugin-log"
branch = "dev"
features = ["colored"]
[dependencies.tauri-plugin-autostart]
git = "https://github.com/lencx/tauri-plugin-autostart"
branch = "dev"
[features]
# by default Tauri runs in production mode
@@ -44,4 +49,4 @@ custom-protocol = [ "tauri/custom-protocol" ]
[profile.release]
strip = true
lto = true
opt-level = "z"
opt-level = "s"

A binary file was added (diff not shown).

src-tauri/src/app/cmd.rs

@@ -1,12 +1,28 @@
use crate::{conf::ChatConfJson, utils};
use std::{fs, path::PathBuf};
use tauri::{api, command, AppHandle, Manager};
use crate::{
app::window,
conf::{ChatConfJson, GITHUB_PROMPTS_CSV_URL},
utils,
};
use log::info;
use std::{collections::HashMap, fs, path::PathBuf};
use tauri::{api, command, AppHandle, Manager, Theme};
use walkdir::WalkDir;
#[command]
pub fn drag_window(app: AppHandle) {
app.get_window("core").unwrap().start_dragging().unwrap();
}
#[command]
pub fn dalle2_window(app: AppHandle, query: String) {
window::dalle2_window(
&app.app_handle(),
Some(query),
Some("ChatGPT & DALL·E 2".to_string()),
None,
);
}
#[command]
pub fn fullscreen(app: AppHandle) {
let win = app.get_window("core").unwrap();
@@ -34,6 +50,21 @@ pub fn get_chat_conf() -> ChatConfJson {
ChatConfJson::get_chat_conf()
}
#[command]
pub fn get_theme() -> String {
ChatConfJson::theme().unwrap_or(Theme::Light).to_string()
}
#[command]
pub fn reset_chat_conf() -> ChatConfJson {
ChatConfJson::reset_chat_conf()
}
#[command]
pub fn run_check_update(app: AppHandle, silent: bool, has_msg: Option<bool>) {
utils::run_check_update(app, silent, has_msg);
}
#[command]
pub fn form_confirm(_app: AppHandle, data: serde_json::Value) {
ChatConfJson::amend(&serde_json::json!(data), None).unwrap();
@@ -72,7 +103,7 @@ pub fn get_chat_model_cmd() -> serde_json::Value {
serde_json::from_str(&content).unwrap()
}
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PromptRecord {
pub cmd: Option<String>,
pub act: String,
@@ -84,8 +115,17 @@ pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
let mut rdr = csv::Reader::from_reader(data.as_bytes());
let mut list = vec![];
for result in rdr.deserialize() {
let record: PromptRecord = result.unwrap();
list.push(record);
let record: PromptRecord = result.unwrap_or_else(|err| {
info!("parse_prompt_error: {}", err);
PromptRecord {
cmd: None,
act: "".to_string(),
prompt: "".to_string(),
}
});
if !record.act.is_empty() {
list.push(record);
}
}
list
}
@@ -99,10 +139,6 @@ pub fn window_reload(app: AppHandle, label: &str) {
.unwrap();
}
use walkdir::WalkDir;
use utils::chat_root;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct ModelRecord {
pub cmd: String,
@@ -115,12 +151,14 @@ pub struct ModelRecord {
#[command]
pub fn cmd_list() -> Vec<ModelRecord> {
let mut list = vec![];
for entry in WalkDir::new(chat_root().join("cache_model")).into_iter().filter_map(|e| e.ok()) {
for entry in WalkDir::new(utils::chat_root().join("cache_model"))
.into_iter()
.filter_map(|e| e.ok())
{
let file = fs::read_to_string(entry.path().display().to_string());
if let Ok(v) = file {
let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
let enable_list = data.into_iter()
.filter(|v| v.enable);
let enable_list = data.into_iter().filter(|v| v.enable);
list.extend(enable_list)
}
}
@@ -128,3 +166,138 @@ pub fn cmd_list() -> Vec<ModelRecord> {
list.sort_by(|a, b| a.cmd.len().cmp(&b.cmd.len()));
list
}
#[command]
pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(GITHUB_PROMPTS_CSV_URL, Some(&app))
.await
.unwrap();
if let Some(v) = res {
let data = parse_prompt(v)
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["chatgpt-prompts".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
let data2 = data.clone();
let model = utils::chat_root().join("chat.model.json");
let model_cmd = utils::chat_root().join("chat.model.cmd.json");
let chatgpt_prompts = utils::chat_root()
.join("cache_model")
.join("chatgpt_prompts.json");
if !utils::exists(&model) {
fs::write(
&model,
serde_json::json!({
"name": "ChatGPT Model",
"link": "https://github.com/lencx/ChatGPT"
})
.to_string(),
)
.unwrap();
}
// chatgpt_prompts.json
fs::write(
chatgpt_prompts,
serde_json::to_string_pretty(&data).unwrap(),
)
.unwrap();
let cmd_data = cmd_list();
// chat.model.cmd.json
fs::write(
model_cmd,
serde_json::to_string_pretty(&serde_json::json!({
"name": "ChatGPT CMD",
"last_updated": time,
"data": cmd_data,
}))
.unwrap(),
)
.unwrap();
let mut kv = HashMap::new();
kv.insert(
"sync_prompts".to_string(),
serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
);
let model_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
&kv,
);
// chat.model.json
fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
// refresh window
api::dialog::message(
app.get_window("core").as_ref(),
"Sync Prompts",
"ChatGPT Prompts data has been synchronized!",
);
window_reload(app.clone(), "core");
window_reload(app, "tray");
return Some(data2);
}
None
}
#[command]
pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<ModelRecord>> {
let res = utils::get_data(&url, None).await.unwrap_or_else(|err| {
info!("chatgpt_http_error: {}", err);
None
});
info!("chatgpt_http_url: {}", url);
if let Some(v) = res {
let data;
if data_type == "csv" {
info!("chatgpt_http_csv_parse");
data = parse_prompt(v);
} else if data_type == "json" {
info!("chatgpt_http_json_parse");
data = serde_json::from_str(&v).unwrap_or_else(|err| {
info!("chatgpt_http_json_parse_error: {}", err);
vec![]
});
} else {
info!("chatgpt_http_unknown_type");
data = vec![];
}
let data = data
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["user-sync".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
return Some(data);
}
None
}

src-tauri/src/app/menu.rs

@@ -1,43 +1,68 @@
use crate::{
app::{cmd, window},
conf::{self, ChatConfJson},
utils,
};
use tauri::{
AboutMetadata, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray,
SystemTrayEvent, SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray, SystemTrayEvent,
SystemTrayMenu, SystemTrayMenuItem, WindowMenuEvent,
};
use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
#[cfg(target_os = "macos")]
use tauri::AboutMetadata;
// --- Menu
pub fn init() -> Menu {
let chat_conf = ChatConfJson::get_chat_conf();
let name = "ChatGPT";
let app_menu = Submenu::new(
name,
Menu::new()
.add_native_item(MenuItem::About(name.into(), AboutMetadata::default()))
.add_native_item(MenuItem::Services)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Hide)
.add_native_item(MenuItem::HideOthers)
.add_native_item(MenuItem::ShowAll)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Quit),
Menu::with_items([
#[cfg(target_os = "macos")]
MenuItem::About(name.into(), AboutMetadata::default()).into(),
#[cfg(not(target_os = "macos"))]
CustomMenuItem::new("about".to_string(), "About ChatGPT").into(),
CustomMenuItem::new("check_update".to_string(), "Check for Updates").into(),
MenuItem::Services.into(),
MenuItem::Hide.into(),
MenuItem::HideOthers.into(),
MenuItem::ShowAll.into(),
MenuItem::Separator.into(),
MenuItem::Quit.into(),
]),
);
let stay_on_top =
CustomMenuItem::new("stay_on_top".to_string(), "Stay On Top").accelerator("CmdOrCtrl+T");
let titlebar =
CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B");
let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light");
let theme_dark = CustomMenuItem::new("theme_dark".to_string(), "Dark");
let is_dark = chat_conf.theme == "Dark";
let stay_on_top_menu = if chat_conf.stay_on_top {
stay_on_top.selected()
} else {
stay_on_top
};
#[cfg(target_os = "macos")]
let titlebar =
CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B");
let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light");
let theme_dark = CustomMenuItem::new("theme_dark".to_string(), "Dark");
let theme_system = CustomMenuItem::new("theme_system".to_string(), "System");
let is_dark = chat_conf.theme == "Dark";
let is_system = chat_conf.theme == "System";
let update_prompt = CustomMenuItem::new("update_prompt".to_string(), "Prompt");
let update_silent = CustomMenuItem::new("update_silent".to_string(), "Silent");
let _update_disable = CustomMenuItem::new("update_disable".to_string(), "Disable");
let popup_search = CustomMenuItem::new("popup_search".to_string(), "Pop-up Search");
let popup_search_menu = if chat_conf.popup_search {
popup_search.selected()
} else {
popup_search
};
#[cfg(target_os = "macos")]
let titlebar_menu = if chat_conf.titlebar {
titlebar.selected()
} else {
@@ -51,21 +76,6 @@ pub fn init() -> Menu {
.accelerator("CmdOrCtrl+Shift+P")
.into(),
MenuItem::Separator.into(),
Submenu::new(
"Theme",
Menu::new()
.add_item(if is_dark {
theme_light
} else {
theme_light.selected()
})
.add_item(if is_dark {
theme_dark.selected()
} else {
theme_dark
}),
)
.into(),
stay_on_top_menu.into(),
#[cfg(target_os = "macos")]
titlebar_menu.into(),
@@ -75,6 +85,47 @@ pub fn init() -> Menu {
.accelerator("CmdOrCtrl+J")
.into(),
MenuItem::Separator.into(),
Submenu::new(
"Theme",
Menu::new()
.add_item(if is_dark || is_system {
theme_light
} else {
theme_light.selected()
})
.add_item(if is_dark {
theme_dark.selected()
} else {
theme_dark
})
.add_item(if is_system {
theme_system.selected()
} else {
theme_system
}),
)
.into(),
Submenu::new(
"Auto Update",
Menu::new()
.add_item(if chat_conf.auto_update == "Prompt" {
update_prompt.selected()
} else {
update_prompt
})
.add_item(if chat_conf.auto_update == "Silent" {
update_silent.selected()
} else {
update_silent
}), // .add_item(if chat_conf.auto_update == "Disable" {
// update_disable.selected()
// } else {
// update_disable
// })
)
.into(),
MenuItem::Separator.into(),
popup_search_menu.into(),
CustomMenuItem::new("sync_prompts".to_string(), "Sync Prompts").into(),
MenuItem::Separator.into(),
CustomMenuItem::new("go_conf".to_string(), "Go to Config")
@@ -133,6 +184,8 @@ pub fn init() -> Menu {
let window_menu = Submenu::new(
"Window",
Menu::new()
.add_item(CustomMenuItem::new("dalle2".to_string(), "DALL·E 2"))
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Minimize)
.add_native_item(MenuItem::Zoom),
);
@@ -155,9 +208,9 @@ pub fn init() -> Menu {
Menu::new()
.add_submenu(app_menu)
.add_submenu(preferences_menu)
.add_submenu(window_menu)
.add_submenu(edit_menu)
.add_submenu(view_menu)
.add_submenu(window_menu)
.add_submenu(help_menu)
}
@@ -165,29 +218,50 @@ pub fn init() -> Menu {
pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
let win = Some(event.window()).unwrap();
let app = win.app_handle();
let state: tauri::State<conf::ChatState> = app.state();
let script_path = utils::script_path().to_string_lossy().to_string();
let menu_id = event.menu_item_id();
let core_window = app.get_window("core").unwrap();
let menu_handle = core_window.menu_handle();
let menu_handle = win.menu_handle();
match menu_id {
// App
"about" => {
let tauri_conf = utils::get_tauri_conf().unwrap();
tauri::api::dialog::message(
app.get_window("core").as_ref(),
"ChatGPT",
format!("Version {}", tauri_conf.package.version.unwrap()),
);
}
"check_update" => {
utils::run_check_update(app, false, None);
}
// Preferences
"control_center" => app.get_window("main").unwrap().show().unwrap(),
"control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&app.env()),
"inject_script" => open(&app, script_path),
"go_conf" => utils::open_file(utils::chat_root()),
"clear_conf" => utils::clear_conf(&app),
"awesome" => open(&app, conf::AWESOME_URL.to_string()),
"popup_search" => {
let chat_conf = conf::ChatConfJson::get_chat_conf();
let popup_search = !chat_conf.popup_search;
menu_handle
.get_item(menu_id)
.set_selected(popup_search)
.unwrap();
ChatConfJson::amend(&serde_json::json!({ "popup_search": popup_search }), None)
.unwrap();
cmd::window_reload(app.clone(), "core");
cmd::window_reload(app, "tray");
}
"sync_prompts" => {
tauri::api::dialog::ask(
app.get_window("main").as_ref(),
app.get_window("core").as_ref(),
"Sync Prompts",
"Data sync will enable all prompts, are you sure you want to sync?",
move |is_restart| {
if is_restart {
app.get_window("main")
app.get_window("core")
.unwrap()
.eval("window.__sync_prompts && window.__sync_prompts()")
.unwrap()
@@ -207,28 +281,61 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
.unwrap();
tauri::api::process::restart(&app.env());
}
"theme_light" | "theme_dark" => {
let theme = if menu_id == "theme_dark" {
"Dark"
} else {
"Light"
"theme_light" | "theme_dark" | "theme_system" => {
let theme = match menu_id {
"theme_dark" => "Dark",
"theme_system" => "System",
_ => "Light",
};
ChatConfJson::amend(&serde_json::json!({ "theme": theme }), Some(app)).unwrap();
}
"update_prompt" | "update_silent" | "update_disable" => {
// for id in ["update_prompt", "update_silent", "update_disable"] {
for id in ["update_prompt", "update_silent"] {
menu_handle.get_item(id).set_selected(false).unwrap();
}
let auto_update = match menu_id {
"update_silent" => {
menu_handle
.get_item("update_silent")
.set_selected(true)
.unwrap();
"Silent"
}
"update_disable" => {
menu_handle
.get_item("update_disable")
.set_selected(true)
.unwrap();
"Disable"
}
_ => {
menu_handle
.get_item("update_prompt")
.set_selected(true)
.unwrap();
"Prompt"
}
};
ChatConfJson::amend(&serde_json::json!({ "auto_update": auto_update }), None).unwrap();
}
"stay_on_top" => {
let mut stay_on_top = state.stay_on_top.lock().unwrap();
*stay_on_top = !*stay_on_top;
let chat_conf = conf::ChatConfJson::get_chat_conf();
let stay_on_top = !chat_conf.stay_on_top;
menu_handle
.get_item(menu_id)
.set_selected(*stay_on_top)
.set_selected(stay_on_top)
.unwrap();
win.set_always_on_top(*stay_on_top).unwrap();
ChatConfJson::amend(&serde_json::json!({ "stay_on_top": *stay_on_top }), None).unwrap();
win.set_always_on_top(stay_on_top).unwrap();
ChatConfJson::amend(&serde_json::json!({ "stay_on_top": stay_on_top }), None).unwrap();
}
// Window
"dalle2" => window::dalle2_window(&app, None, None, Some(false)),
// View
"reload" => win.eval("window.location.reload()").unwrap(),
"go_back" => win.eval("window.history.go(-1)").unwrap(),
"go_forward" => win.eval("window.history.go(1)").unwrap(),
// core: document.querySelector('main .overflow-y-auto')
"scroll_top" => win
.eval(
r#"window.scroll({
@@ -260,23 +367,38 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
// --- SystemTray Menu
pub fn tray_menu() -> SystemTray {
SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
)
if cfg!(target_os = "macos") {
SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new(
"show_dock_icon".to_string(),
"Show Dock Icon",
))
.add_item(CustomMenuItem::new(
"hide_dock_icon".to_string(),
"Hide Dock Icon",
))
.add_item(CustomMenuItem::new("show_core".to_string(), "Show ChatGPT"))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
)
} else {
SystemTray::new().with_menu(
SystemTrayMenu::new()
.add_item(CustomMenuItem::new(
"control_center".to_string(),
"Control Center",
))
.add_item(CustomMenuItem::new("show_core".to_string(), "Show ChatGPT"))
.add_native_item(SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")),
)
}
}
// --- SystemTray Event
@@ -304,7 +426,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
}
}
SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
"control_center" => app.get_window("main").unwrap().show().unwrap(),
"control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&handle.env()),
"show_dock_icon" => {
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))
@@ -317,6 +439,15 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
.unwrap();
}
}
"show_core" => {
let core_win = app.get_window("core").unwrap();
let tray_win = app.get_window("tray").unwrap();
if !core_win.is_visible().unwrap() {
core_win.show().unwrap();
core_win.set_focus().unwrap();
tray_win.hide().unwrap();
}
}
"quit" => std::process::exit(0),
_ => (),
},


@@ -1,22 +1,55 @@
use crate::{app::window, conf::ChatConfJson, utils};
use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, Manager};
use log::info;
use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, GlobalShortcutManager, Manager};
use wry::application::accelerator::Accelerator;
pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> {
info!("stepup");
let chat_conf = ChatConfJson::get_chat_conf();
let url = chat_conf.origin.to_string();
let theme = ChatConfJson::theme();
let handle = app.app_handle();
std::thread::spawn(move || {
tauri::async_runtime::spawn(async move {
window::tray_window(&handle);
});
if let Some(v) = chat_conf.global_shortcut {
info!("global_shortcut: `{}`", v);
match v.parse::<Accelerator>() {
Ok(_) => {
info!("global_shortcut_register");
let handle = app.app_handle();
let mut shortcut = app.global_shortcut_manager();
shortcut
.register(&v, move || {
if let Some(w) = handle.get_window("core") {
if w.is_visible().unwrap() {
w.hide().unwrap();
} else {
w.show().unwrap();
w.set_focus().unwrap();
}
}
})
.unwrap_or_else(|err| {
info!("global_shortcut_register_error: {}", err);
});
}
Err(err) => {
info!("global_shortcut_parse_error: {}", err);
}
}
} else {
info!("global_shortcut_unregister");
};
if chat_conf.hide_dock_icon {
#[cfg(target_os = "macos")]
app.set_activation_policy(tauri::ActivationPolicy::Accessory);
} else {
let app = app.handle();
std::thread::spawn(move || {
tauri::async_runtime::spawn(async move {
#[cfg(target_os = "macos")]
WindowBuilder::new(&app, "core", WindowUrl::App(url.into()))
.title("ChatGPT")
@@ -28,9 +61,12 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.always_on_top(chat_conf.stay_on_top)
.title_bar_style(ChatConfJson::titlebar())
.initialization_script(&utils::user_script())
.initialization_script(include_str!("../assets/html2canvas.js"))
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../vendors/floating-ui-core.js"))
.initialization_script(include_str!("../vendors/floating-ui-dom.js"))
.initialization_script(include_str!("../vendors/html2canvas.js"))
.initialization_script(include_str!("../vendors/jspdf.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/popup.core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
@@ -46,9 +82,12 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.theme(theme)
.always_on_top(chat_conf.stay_on_top)
.initialization_script(&utils::user_script())
.initialization_script(include_str!("../assets/html2canvas.js"))
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../vendors/floating-ui-core.js"))
.initialization_script(include_str!("../vendors/floating-ui-dom.js"))
.initialization_script(include_str!("../vendors/html2canvas.js"))
.initialization_script(include_str!("../vendors/jspdf.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/popup.core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
@@ -57,5 +96,12 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
});
}
// auto_update
if chat_conf.auto_update != "Disable" {
info!("stepup::run_check_update");
let app = app.handle();
utils::run_check_update(app, chat_conf.auto_update == "Silent", None);
}
Ok(())
}


@@ -1,12 +1,14 @@
use crate::{conf, utils};
use tauri::{utils::config::WindowUrl, window::WindowBuilder};
use log::info;
use std::time::SystemTime;
use tauri::{utils::config::WindowUrl, window::WindowBuilder, Manager};
pub fn tray_window(handle: &tauri::AppHandle) {
let chat_conf = conf::ChatConfJson::get_chat_conf();
let theme = conf::ChatConfJson::theme();
let app = handle.clone();
std::thread::spawn(move || {
tauri::async_runtime::spawn(async move {
WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into()))
.title("ChatGPT")
.resizable(false)
@@ -16,11 +18,11 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.always_on_top(true)
.theme(theme)
.initialization_script(&utils::user_script())
.initialization_script(include_str!("../assets/html2canvas.js"))
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../vendors/floating-ui-core.js"))
.initialization_script(include_str!("../vendors/floating-ui-dom.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.initialization_script(include_str!("../assets/popup.core.js"))
.user_agent(&chat_conf.ua_tray)
.build()
.unwrap()
@@ -28,3 +30,78 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.unwrap();
});
}
pub fn dalle2_window(
handle: &tauri::AppHandle,
query: Option<String>,
title: Option<String>,
is_new: Option<bool>,
) {
info!("dalle2_query: {:?}", query);
let theme = conf::ChatConfJson::theme();
let app = handle.clone();
let query = if query.is_some() {
format!(
"window.addEventListener('DOMContentLoaded', function() {{\nwindow.__CHATGPT_QUERY__='{}';\n}})",
query.unwrap()
)
} else {
"".to_string()
};
let label = if is_new.unwrap_or(true) {
let timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
format!("dalle2_{}", timestamp)
} else {
"dalle2".to_string()
};
if app.get_window("dalle2").is_none() {
tauri::async_runtime::spawn(async move {
WindowBuilder::new(
&app,
label,
WindowUrl::App("https://labs.openai.com".into()),
)
.title(title.unwrap_or_else(|| "DALL·E 2".to_string()))
.resizable(true)
.fullscreen(false)
.inner_size(800.0, 600.0)
.always_on_top(false)
.theme(theme)
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(&query)
.initialization_script(include_str!("../assets/dalle2.js"))
.build()
.unwrap();
});
} else {
let dalle2_win = app.get_window("dalle2").unwrap();
dalle2_win.show().unwrap();
dalle2_win.set_focus().unwrap();
}
}
pub fn control_window(handle: &tauri::AppHandle) {
let app = handle.clone();
tauri::async_runtime::spawn(async move {
if app.app_handle().get_window("main").is_none() {
WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
.title("Control Center")
.resizable(true)
.fullscreen(false)
.inner_size(800.0, 600.0)
.min_inner_size(800.0, 600.0)
.build()
.unwrap();
} else {
let main_win = app.app_handle().get_window("main").unwrap();
main_win.show().unwrap();
main_win.set_focus().unwrap();
}
});
}


@@ -13,13 +13,30 @@ function init() {
z-index: 9999;
}
.chat-model-cmd-list>div {
border: solid 2px #d8d8d8;
border: solid 2px rgba(80,80,80,.3);
border-radius: 5px;
background-color: #fff;
}
html.dark .chat-model-cmd-list>div {
background-color: #4a4a4a;
}
html.dark .chat-model-cmd-list .cmd-item {
border-color: #666;
}
html.dark .chat-model-cmd-list .cmd-item b {
color: #e8e8e8;
}
html.dark .chat-model-cmd-list .cmd-item i {
color: #999;
}
html.dark .chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.5);
}
.chat-model-cmd-list .cmd-item {
font-size: 12px;
border-bottom: solid 1px #888;
border-bottom: solid 1px rgba(80,80,80,.2);
padding: 2px 4px;
display: flex;
user-select: none;
@@ -28,6 +45,9 @@ function init() {
.chat-model-cmd-list .cmd-item:last-child {
border-bottom: none;
}
.chat-model-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.3);
}
.chat-model-cmd-list .cmd-item b {
display: inline-block;
width: 100px;
@@ -46,7 +66,21 @@ function init() {
white-space: nowrap;
text-align: right;
color: #888;
}`;
}
.chatappico {
width: 20px;
height: 20px;
}
.chatappico.pdf {
width: 24px;
height: 24px;
}
@media screen and (max-width: 767px) {
#download-png-button, #download-pdf-button, #download-html-button {
display: none;
}
}
`;
document.head.append(styleDom);
if (window.formInterval) {
@@ -70,11 +104,24 @@ async function cmdTip() {
// fix: tray window
if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
modelDom.style.bottom = '40px';
modelDom.style.bottom = '54px';
}
document.querySelector('form').appendChild(modelDom);
const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-cmd="${v.cmd}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
const renderList = (v) => {
modelDom.innerHTML = `<div>${v.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = v[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = v[0]?.cmd.trim();
window.__list = modelDom.querySelectorAll('.cmd-item');
window.__index = 0;
window.__list[window.__index].classList.add('selected');
};
const setPrompt = (v = '') => {
if (v.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${v.trim()}}`);
}
}
const searchInput = document.querySelector('form textarea');
// Type a command starting with `/` and press space to automatically fill in the ChatGPT prompt.
@@ -84,6 +131,35 @@ async function cmdTip() {
return;
}
// ------------------ Keyboard scrolling (ArrowUp | ArrowDown) --------------------------
if (event.keyCode === 38 && window.__index > 0) { // ArrowUp
window.__list[window.__index].classList.remove('selected');
window.__index = window.__index - 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
}
if (event.keyCode === 40 && window.__index < window.__list.length - 1) { // ArrowDown
window.__list[window.__index].classList.remove('selected');
window.__index = window.__index + 1;
window.__list[window.__index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(window.__list[window.__index].getAttribute('data-prompt'));
searchInput.value = `/${window.__list[window.__index].getAttribute('data-cmd')}`;
event.preventDefault();
}
const containerHeight = modelDom.offsetHeight;
const itemHeight = window.__list[0].offsetHeight + 1;
const itemTop = window.__list[window.__index].offsetTop;
const itemBottom = itemTop + itemHeight;
if (itemTop < modelDom.scrollTop || itemBottom > modelDom.scrollTop + containerHeight) {
modelDom.scrollTop = itemTop;
}
// ------------------ TAB key replaces `{q}` tag content -------------------------------
// feat: https://github.com/lencx/ChatGPT/issues/54
if (event.keyCode === 9 && !window.__CHAT_MODEL_STATUS__) {
const strGroup = window.__CHAT_MODEL_CMD_PROMPT__.match(/\{([^{}]*)\}/) || [];
@@ -95,38 +171,34 @@ async function cmdTip() {
event.preventDefault();
}
if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) {
if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) { // TAB
const data = searchInput.value.split('|->');
if (data[1]?.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${data[1]?.trim()}}`);
setPrompt(data[1]);
window.__CHAT_MODEL_STATUS__ = 2;
}
event.preventDefault();
}
// input text
if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) {
console.log('«110» /src/assets/cmd.js ~> ', __CHAT_MODEL_STATUS__);
if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) { // TAB
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_STATUS__;
event.preventDefault();
}
// type in a space to complete the fill
// ------------------ type in a space to complete the fill ------------------------------------
if (event.keyCode === 32) {
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;
}
// send
if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) {
// ------------------ send --------------------------------------------------------------------
if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) { // Enter
const data = searchInput.value.split('|->');
if (data[1]?.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(/\{([^{}]*)\}/, `{${data[1]?.trim()}}`);
}
setPrompt(data[1]);
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
@@ -137,7 +209,7 @@ async function cmdTip() {
}
});
searchInput.addEventListener('input', (event) => {
searchInput.addEventListener('input', () => {
if (searchInput.value === '') {
delete window.__CHAT_MODEL_CMD_PROMPT__;
delete window.__CHAT_MODEL_CMD__;
@@ -154,17 +226,13 @@ async function cmdTip() {
// all cmd result
if (query === '/') {
modelDom.innerHTML = `<div>${data.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = data[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = data[0]?.cmd.trim();
renderList(data);
return;
}
const result = data.filter(i => new RegExp(query.substring(1)).test(i.cmd));
if (result.length > 0) {
modelDom.innerHTML = `<div>${result.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = result[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = result[0]?.cmd.trim();
renderList(result);
} else {
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;


@@ -71,6 +71,7 @@ async function init() {
document.addEventListener("click", (e) => {
const origin = e.target.closest("a");
if (!origin || !origin.target) return;
if (origin && origin.href && origin.target !== '_self') {
invoke('open_link', { url: origin.href });
}
@@ -86,6 +87,10 @@ async function init() {
}
}
});
window.__sync_prompts = async function() {
await invoke('sync_prompts', { time: Date.now() });
}
}
if (

src-tauri/src/assets/dalle2.js (vendored, new file, 40 lines)

@@ -0,0 +1,40 @@
// *** Core Script - DALL·E 2 ***
async function init() {
document.addEventListener("click", (e) => {
const origin = e.target.closest("a");
if (!origin || !origin.target) return;
if (origin && origin.href && origin.target !== '_self') {
if (/\/(login|signup)$/.test(window.location.href)) {
origin.target = '_self';
} else {
invoke('open_link', { url: origin.href });
}
}
});
if (window.searchInterval) {
clearInterval(window.searchInterval);
}
window.searchInterval = setInterval(() => {
const searchInput = document.querySelector('.image-prompt-form-wrapper form>.text-input');
if (searchInput) {
clearInterval(window.searchInterval);
if (!window.__CHATGPT_QUERY__) return;
const query = decodeURIComponent(window.__CHATGPT_QUERY__);
searchInput.focus();
searchInput.value = query;
}
}, 200)
}
if (
document.readyState === "complete" ||
document.readyState === "interactive"
) {
init();
} else {
document.addEventListener("DOMContentLoaded", init);
}


@@ -3,6 +3,7 @@
const buttonOuterHTMLFallback = `<button class="btn flex justify-center gap-2 btn-neutral" id="download-png-button">Try Again</button>`;
async function init() {
if (window.innerWidth < 767) return;
const chatConf = await invoke('get_chat_conf') || {};
if (window.buttonsInterval) {
clearInterval(window.buttonsInterval);
@@ -23,7 +24,7 @@ async function init() {
} else if (shouldRemoveButtons()) {
removeButtons();
}
}, 200);
}, 1000);
}
const Format = {
@@ -46,9 +47,16 @@ function shouldRemoveButtons() {
function shouldAddButtons(actionsArea) {
// first, check if there's a "Try Again" button and no other buttons
const buttons = actionsArea.querySelectorAll("button");
const hasTryAgainButton = Array.from(buttons).some((button) => {
return !button.id?.includes("download");
});
// fix: https://github.com/lencx/ChatGPT/issues/189
if (buttons.length === 1) {
return false;
}
if (hasTryAgainButton && buttons.length === 1) {
return true;
}
@@ -88,7 +96,9 @@ function addActionsButtons(actionsArea, TryAgainButton) {
const downloadButton = TryAgainButton.cloneNode(true);
downloadButton.id = "download-png-button";
downloadButton.setAttribute("share-ext", "true");
downloadButton.innerText = "Generate PNG";
// downloadButton.innerText = "Generate PNG";
downloadButton.title = "Generate PNG";
downloadButton.innerHTML = setIcon('png');
downloadButton.onclick = () => {
downloadThread();
};
@@ -96,19 +106,25 @@ function addActionsButtons(actionsArea, TryAgainButton) {
const downloadPdfButton = TryAgainButton.cloneNode(true);
downloadPdfButton.id = "download-pdf-button";
downloadButton.setAttribute("share-ext", "true");
downloadPdfButton.innerText = "Download PDF";
// downloadPdfButton.innerText = "Download PDF";
downloadPdfButton.title = "Download PDF";
downloadPdfButton.innerHTML = setIcon('pdf');
downloadPdfButton.onclick = () => {
downloadThread({ as: Format.PDF });
};
actionsArea.appendChild(downloadPdfButton);
const exportHtml = TryAgainButton.cloneNode(true);
exportHtml.id = "download-html-button";
downloadButton.setAttribute("share-ext", "true");
exportHtml.innerText = "Share Link";
exportHtml.onclick = () => {
sendRequest();
};
actionsArea.appendChild(exportHtml);
// fix: https://github.com/lencx/ChatGPT/issues/126
// const exportHtml = TryAgainButton.cloneNode(true);
// exportHtml.id = "download-html-button";
// downloadButton.setAttribute("share-ext", "true");
// // exportHtml.innerText = "Share Link";
// exportHtml.title = "Share Link";
// exportHtml.innerHTML = setIcon('link');
// exportHtml.onclick = () => {
// sendRequest();
// };
// actionsArea.appendChild(exportHtml);
}
function downloadThread({ as = Format.PNG } = {}) {
@@ -168,12 +184,27 @@ class Elements {
this.thread = document.querySelector(
"[class*='react-scroll-to-bottom']>[class*='react-scroll-to-bottom']>div"
);
// fix: old chat https://github.com/lencx/ChatGPT/issues/185
if (!this.thread) {
this.thread = document.querySelector(
"main .overflow-y-auto"
);
}
// h-full overflow-y-auto
this.positionForm = document.querySelector("form").parentNode;
// this.styledThread = document.querySelector("main");
// this.threadContent = document.querySelector(".gAnhyd");
this.scroller = Array.from(
document.querySelectorAll('[class*="react-scroll-to"]')
).filter((el) => el.classList.contains("h-full"))[0];
// fix: old chat
if (!this.scroller) {
this.scroller = document.querySelector('main .overflow-y-auto');
}
this.hiddens = Array.from(document.querySelectorAll(".overflow-hidden"));
this.images = Array.from(document.querySelectorAll("img[srcset]"));
}
@@ -269,4 +300,12 @@ if (
init();
} else {
document.addEventListener("DOMContentLoaded", init);
}
}
function setIcon(type) {
return {
link: `<svg class="chatappico" viewBox="0 0 1024 1024"><path d="M1007.382 379.672L655.374 75.702C624.562 49.092 576 70.694 576 112.03v160.106C254.742 275.814 0 340.2 0 644.652c0 122.882 79.162 244.618 166.666 308.264 27.306 19.862 66.222-5.066 56.154-37.262C132.132 625.628 265.834 548.632 576 544.17V720c0 41.4 48.6 62.906 79.374 36.328l352.008-304c22.142-19.124 22.172-53.506 0-72.656z" p-id="8506" fill="currentColor"></path></svg>`,
png: `<svg class="chatappico" viewBox="0 0 1070 1024"><path d="M981.783273 0H85.224727C38.353455 0 0 35.374545 0 83.083636v844.893091c0 47.616 38.353455 86.574545 85.178182 86.574546h903.633454c46.917818 0 81.733818-38.958545 81.733819-86.574546V83.083636C1070.592 35.374545 1028.701091 0 981.783273 0zM335.825455 135.912727c74.193455 0 134.330182 60.974545 134.330181 136.285091 0 75.170909-60.136727 136.192-134.330181 136.192-74.286545 0-134.516364-61.021091-134.516364-136.192 0-75.264 60.229818-136.285091 134.516364-136.285091z m-161.512728 745.937455a41.890909 41.890909 0 0 1-27.648-10.379637 43.752727 43.752727 0 0 1-4.654545-61.067636l198.097454-255.162182a42.123636 42.123636 0 0 1 57.716364-6.702545l116.549818 128.139636 286.906182-352.814545c14.615273-18.711273 90.251636-106.775273 135.866182-6.935273 0.093091-0.093091 0.093091 112.965818 0.232727 247.761455 0.093091 140.8 0.093091 317.067636 0.093091 317.067636-1.024-0.093091-762.740364 0.093091-763.112727 0.093091z" fill="currentColor"></path></svg>`,
pdf: `<svg class="chatappico pdf" viewBox="0 0 1024 1024"><path d="M821.457602 118.382249H205.725895c-48.378584 0-87.959995 39.583368-87.959996 87.963909v615.731707c0 48.378584 39.581411 87.959995 87.959996 87.959996h615.733664c48.380541 0 87.961952-39.581411 87.961952-87.959996V206.346158c-0.001957-48.378584-39.583368-87.963909-87.963909-87.963909zM493.962468 457.544987c-10.112054 32.545237-21.72487 82.872662-38.806571 124.248336-8.806957 22.378397-8.380404 18.480717-15.001764 32.609808l5.71738-1.851007c58.760658-16.443827 99.901532-20.519564 138.162194-27.561607-7.67796-6.06371-14.350194-10.751884-19.631237-15.586807-26.287817-29.101504-35.464584-34.570387-70.440002-111.862636v0.003913z m288.36767 186.413594c-7.476424 8.356924-20.670227 13.191847-40.019704 13.191847-33.427694 0-63.808858-9.229597-107.79277-31.660824-75.648648 8.356924-156.097 17.214754-201.399704 31.729308-2.199293 0.876587-4.832967 1.759043-7.916674 3.077836-54.536215 93.237125-95.031389 132.767663-130.621199 131.19646-11.286054-0.49895-27.694661-7.044-32.973748-10.11988l-6.52157-6.196764-2.29517-4.353583c-3.07588-7.91863-3.954423-15.395054-2.197337-23.751977 4.838837-23.309771 29.907651-60.251638 82.686779-93.237126 8.356924-6.159587 27.430511-15.897917 45.020944-24.25484 13.311204-21.177004 19.45905-34.744531 36.341171-72.259702 19.102937-45.324228 36.505531-99.492589 47.500041-138.191543v-0.44025c-16.267727-53.219378-25.945401-89.310095-9.67376-147.80856 3.958337-16.71189 18.46702-33.864031 34.748444-33.864031h10.552304c10.115967 0 19.791684 3.520043 26.829814 10.552304 29.029107 29.031064 15.39114 103.824649 0.8805 162.323113-0.8805 2.63563-1.322707 4.832967-1.761 6.153717 17.59239 49.697378 45.400538 98.774492 73.108895 121.647926 11.436717 8.791304 22.638634 18.899444 36.71098 26.814161 19.791684-2.20125 37.517128-4.11487 55.547812-4.11487 54.540128 0 87.525615 9.67963 100.279169 30.351814 4.400543 7.034217 6.595923 15.389184 5.281043 24.1844-0.44025 10.996467-4.39663 21.112434-12.31526 29.031064z m-27.796407-36.748157c-4.394673-4.398587-17.024957-16.936907-78.601259-16.936907-3.073923 0-10.622744-0.784623-14.57521 3.612007 32.104987 14.072347 62.830525 24.757704 83.058545 24.757703 3.083707 0 5.72325-0.442207 8.356923-0.876586h1.759044c2.20125-0.8805 3.520043-1.324663 3.960293-5.71738-0.87463-1.324663-1.757087-3.083707-3.958336-4.838837z m-387.124553 63.041845c-9.237424 5.27713-16.71189 10.112054-21.112433 13.634053-31.226444 28.586901-51.018128 57.616008-53.217422 74.331812 19.789727-6.59788 45.737084-35.626987 74.329855-87.961952v-0.003913z m125.574957-297.822284l2.197336-1.761c3.079793-14.072347 5.232127-29.189554 7.87167-38.869184l1.318794-7.036174c4.39663-25.070771 2.71781-39.720334-4.76057-50.272637l-6.59788-2.20125a57.381208 57.381208 0 0 0-3.079794 5.27713c-7.474467 18.47289-7.063567 55.283661 3.0524 94.865072l-0.001956-0.001957z" fill="currentColor"></path></svg>`
}[type];
}

src-tauri/src/assets/popup.core.js (vendored, new file, 84 lines)

@@ -0,0 +1,84 @@
// *** Core Script - DALL·E 2 Core ***
async function init() {
const chatConf = await invoke('get_chat_conf') || {};
if (!chatConf.popup_search) return;
if (!window.FloatingUIDOM) return;
const styleDom = document.createElement('style');
styleDom.innerHTML = `
#chagpt-selection-menu {
display: none;
width: max-content;
position: absolute;
top: 0;
left: 0;
background: #4a4a4a;
color: white;
font-weight: bold;
padding: 5px 8px;
border-radius: 4px;
font-size: 12px;
cursor: pointer;
}
`;
document.head.append(styleDom);
const selectionMenu = document.createElement('div');
selectionMenu.id = 'chagpt-selection-menu';
selectionMenu.innerHTML = 'DALL·E 2';
document.body.appendChild(selectionMenu);
const { computePosition, flip, offset, shift } = window.FloatingUIDOM;
document.body.addEventListener('mousedown', async (e) => {
if (e.target.id === 'chagpt-selection-menu') {
await invoke('dalle2_window', { query: encodeURIComponent(window.__DALLE2_CONTENT__) });
} else {
delete window.__DALLE2_CONTENT__;
}
});
document.body.addEventListener("mouseup", async (e) => {
selectionMenu.style.display = 'none';
const selection = window.getSelection();
window.__DALLE2_CONTENT__ = selection.toString().trim();
if (!window.__DALLE2_CONTENT__) return;
if (selection.rangeCount > 0) {
const range = selection.getRangeAt(0);
const rect = range.getClientRects()[0];
const rootEl = document.createElement('div');
rootEl.style.top = `${rect.top}px`;
rootEl.style.position = 'fixed';
rootEl.style.left = `${rect.left}px`;
document.body.appendChild(rootEl);
selectionMenu.style.display = 'block';
computePosition(rootEl, selectionMenu, {
placement: 'top',
middleware: [
flip(),
offset(5),
shift({ padding: 5 })
]
}).then(({x, y}) => {
Object.assign(selectionMenu.style, {
left: `${x}px`,
top: `${y}px`,
});
});
}
});
}
if (
document.readyState === "complete" ||
document.readyState === "interactive"
) {
init();
} else {
document.addEventListener("DOMContentLoaded", init);
}
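The selection menu above calls invoke('dalle2_window', { query }). The command wrapper behind that invoke is not shown in this diff; a minimal sketch, assuming it simply forwards to the window::dalle2_window helper added earlier, might be:

// Hypothetical sketch — the actual cmd::dalle2_window wrapper is not in this diff.
#[tauri::command]
pub fn dalle2_window(app: tauri::AppHandle, query: Option<String>, title: Option<String>) {
    // forward to the helper from window.rs above; Some(true) opens a fresh window per query
    crate::app::window::dalle2_window(&app, query, title, Some(true));
}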


@@ -1,22 +1,28 @@
use crate::utils::{chat_root, create_file, exists};
use anyhow::Result;
use log::info;
use serde_json::Value;
use std::{collections::BTreeMap, fs, path::PathBuf, sync::Mutex};
use std::{collections::BTreeMap, fs, path::PathBuf};
use tauri::{Manager, Theme};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
// pub const USER_AGENT: &str = "5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36";
// pub const PHONE_USER_AGENT: &str = "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1";
// pub const USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.1 Safari/605.1.15";
// pub const PHONE_USER_AGENT: &str = "Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.0 Mobile/15E148 Safari/604.1";
pub const ISSUES_URL: &str = "https://github.com/lencx/ChatGPT/issues";
pub const UPDATE_LOG_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md";
pub const AWESOME_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/AWESOME.md";
pub const GITHUB_PROMPTS_CSV_URL: &str =
"https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv";
pub const DEFAULT_CHAT_CONF: &str = r#"{
"stay_on_top": false,
"auto_update": "Prompt",
"theme": "Light",
"titlebar": true,
"popup_search": true,
"global_shortcut": "",
"hide_dock_icon": false,
"default_origin": "https://chat.openai.com",
"origin": "https://chat.openai.com",
@@ -25,8 +31,11 @@ pub const DEFAULT_CHAT_CONF: &str = r#"{
}"#;
pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{
"stay_on_top": false,
"auto_update": "Prompt",
"theme": "Light",
"titlebar": false,
"popup_search": true,
"global_shortcut": "",
"hide_dock_icon": false,
"default_origin": "https://chat.openai.com",
"origin": "https://chat.openai.com",
@@ -34,38 +43,30 @@ pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{
"ua_tray": ""
}"#;
pub struct ChatState {
pub stay_on_top: Mutex<bool>,
}
impl ChatState {
pub fn default(chat_conf: ChatConfJson) -> Self {
ChatState {
stay_on_top: Mutex::new(chat_conf.stay_on_top),
}
}
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct ChatConfJson {
// support macOS only
pub titlebar: bool,
pub hide_dock_icon: bool,
// macOS and Windows
// macOS and Windows, Light/Dark/System
pub theme: String,
// auto update policy, Prompt/Silent/Disable
pub auto_update: String,
pub popup_search: bool,
pub stay_on_top: bool,
pub default_origin: String,
pub origin: String,
pub ua_window: String,
pub ua_tray: String,
pub global_shortcut: Option<String>,
}
impl ChatConfJson {
/// init chat.conf.json
/// path: ~/.chatgpt/chat.conf.json
pub fn init() -> PathBuf {
info!("chat_conf_init");
let conf_file = ChatConfJson::conf_path();
let content = if cfg!(target_os = "macos") {
DEFAULT_CHAT_CONF_MAC
@@ -122,6 +123,17 @@ impl ChatConfJson {
}
}
pub fn reset_chat_conf() -> Self {
let conf_file = ChatConfJson::conf_path();
let content = if cfg!(target_os = "macos") {
DEFAULT_CHAT_CONF_MAC
} else {
DEFAULT_CHAT_CONF
};
fs::write(&conf_file, content).unwrap();
serde_json::from_str(content).unwrap()
}
// https://users.rust-lang.org/t/updating-object-fields-given-dynamic-json/39049/3
pub fn amend(new_rules: &Value, app: Option<tauri::AppHandle>) -> Result<()> {
let config = ChatConfJson::get_chat_conf();
@@ -156,11 +168,20 @@ impl ChatConfJson {
pub fn theme() -> Option<Theme> {
let conf = ChatConfJson::get_chat_conf();
if conf.theme == "Dark" {
Some(Theme::Dark)
} else {
Some(Theme::Light)
}
let theme = match conf.theme.as_str() {
"System" => match dark_light::detect() {
// Dark mode
dark_light::Mode::Dark => Theme::Dark,
// Light mode
dark_light::Mode::Light => Theme::Light,
// Unspecified
dark_light::Mode::Default => Theme::Light,
},
"Dark" => Theme::Dark,
_ => Theme::Light,
};
Some(theme)
}
#[cfg(target_os = "macos")]
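A minimal usage sketch for the configuration API in this file, based on the ChatConfJson methods used throughout this diff (the effect of passing an app handle to amend is an assumption):

// Toggle popup_search in ~/.chatgpt/chat.conf.json.
fn toggle_popup_search() {
    let conf = ChatConfJson::get_chat_conf();
    // None: only rewrite the config file; the menu handler above passes Some(app) for
    // theme changes, presumably so the change can be applied to open windows.
    ChatConfJson::amend(&serde_json::json!({ "popup_search": !conf.popup_search }), None).unwrap();
}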


@@ -8,18 +8,19 @@ mod conf;
mod utils;
use app::{cmd, fs_extra, menu, setup};
use conf::{ChatConfJson, ChatState};
use conf::ChatConfJson;
use tauri::api::path;
use tauri_plugin_autostart::MacosLauncher;
use tauri_plugin_log::{
fern::colors::{Color, ColoredLevelConfig},
LogTarget, LoggerBuilder,
};
fn main() {
#[tokio::main]
async fn main() {
ChatConfJson::init();
// If the file does not exist, creating the file will block menu synchronization
utils::create_chatgpt_prompts();
let chat_conf = ChatConfJson::get_chat_conf();
let context = tauri::generate_context!();
let colors = ColoredLevelConfig {
error: Color::Red,
@@ -33,11 +34,7 @@ fn main() {
// https://github.com/tauri-apps/tauri/pull/2736
.plugin(
LoggerBuilder::new()
.level(if cfg!(debug_assertions) {
log::LevelFilter::Debug
} else {
log::LevelFilter::Trace
})
.level(log::LevelFilter::Debug)
.with_colors(colors)
.targets([
// LogTarget::LogDir,
@@ -48,25 +45,34 @@ fn main() {
])
.build(),
)
.manage(ChatState::default(chat_conf))
.invoke_handler(tauri::generate_handler![
cmd::drag_window,
cmd::fullscreen,
cmd::download,
cmd::open_link,
cmd::get_chat_conf,
cmd::get_theme,
cmd::reset_chat_conf,
cmd::run_check_update,
cmd::form_cancel,
cmd::form_confirm,
cmd::form_msg,
cmd::open_file,
cmd::get_chat_model_cmd,
cmd::parse_prompt,
cmd::sync_prompts,
cmd::sync_user_prompts,
cmd::window_reload,
cmd::dalle2_window,
cmd::cmd_list,
fs_extra::metadata,
])
.setup(setup::init)
.plugin(tauri_plugin_positioner::init())
.plugin(tauri_plugin_autostart::init(
MacosLauncher::LaunchAgent,
None,
))
.menu(menu::init())
.system_tray(menu::tray_menu())
.on_menu_event(menu::menu_handler)
@@ -75,13 +81,18 @@ fn main() {
// https://github.com/tauri-apps/tauri/discussions/2684
if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() {
let win = event.window();
if win.label() == "main" {
win.hide().unwrap();
} else {
if win.label() == "core" {
// TODO: https://github.com/tauri-apps/tauri/issues/3084
// event.window().hide().unwrap();
// https://github.com/tauri-apps/tao/pull/517
#[cfg(target_os = "macos")]
event.window().minimize().unwrap();
// fix: https://github.com/lencx/ChatGPT/issues/93
#[cfg(not(target_os = "macos"))]
event.window().hide().unwrap();
} else {
win.close().unwrap();
}
api.prevent_close();
}


@@ -1,23 +1,26 @@
use anyhow::Result;
use log::info;
use regex::Regex;
use serde_json::Value;
use std::{
collections::HashMap,
fs::{self, File},
path::{Path, PathBuf},
process::Command,
};
use tauri::Manager;
// use tauri::utils::config::Config;
use tauri::updater::UpdateResponse;
use tauri::{utils::config::Config, AppHandle, Manager, Wry};
pub fn chat_root() -> PathBuf {
tauri::api::path::home_dir().unwrap().join(".chatgpt")
}
// pub fn get_tauri_conf() -> Option<Config> {
// let config_file = include_str!("../tauri.conf.json");
// let config: Config =
// serde_json::from_str(config_file).expect("failed to parse tauri.conf.json");
// Some(config)
// }
pub fn get_tauri_conf() -> Option<Config> {
let config_file = include_str!("../tauri.conf.json");
let config: Config =
serde_json::from_str(config_file).expect("failed to parse tauri.conf.json");
Some(config)
}
pub fn exists(path: &Path) -> bool {
Path::new(path).exists()
@@ -89,3 +92,141 @@ pub fn clear_conf(app: &tauri::AppHandle) {
},
);
}
pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
match v {
Value::Object(m) => {
let mut m = m.clone();
for (k, v) in fields {
m.insert(k.clone(), v.clone());
}
Value::Object(m)
}
v => v.clone(),
}
}
pub fn gen_cmd(name: String) -> String {
let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
re.replace_all(&name, "_").to_lowercase()
}
pub async fn get_data(
url: &str,
app: Option<&tauri::AppHandle>,
) -> Result<Option<String>, reqwest::Error> {
let res = reqwest::get(url).await?;
let is_ok = res.status() == 200;
let body = res.text().await?;
if is_ok {
Ok(Some(body))
} else {
info!("chatgpt_http_error: {}", body);
if let Some(v) = app {
tauri::api::dialog::message(v.get_window("core").as_ref(), "ChatGPT HTTP", body);
}
Ok(None)
}
}
pub fn run_check_update(app: AppHandle<Wry>, silent: bool, has_msg: Option<bool>) {
info!("run_check_update: silent={} has_msg={:?}", silent, has_msg);
tauri::async_runtime::spawn(async move {
let result = app.updater().check().await;
let update_resp = result.unwrap();
if update_resp.is_update_available() {
if silent {
tauri::async_runtime::spawn(async move {
silent_install(app, update_resp).await.unwrap();
});
} else {
tauri::async_runtime::spawn(async move {
prompt_for_install(app, update_resp).await.unwrap();
});
}
} else if let Some(v) = has_msg {
if v {
tauri::api::dialog::message(
app.app_handle().get_window("core").as_ref(),
"ChatGPT",
"Your ChatGPT is up to date",
);
}
}
});
}
// Copied from the private API in tauri/updater/mod.rs. TODO: refactor to use a public API
// Prompt a dialog asking if the user want to install the new version
// Maybe we should add an option to customize it in future versions.
pub async fn prompt_for_install(app: AppHandle<Wry>, update: UpdateResponse<Wry>) -> Result<()> {
info!("prompt_for_install");
let windows = app.windows();
let parent_window = windows.values().next();
let package_info = app.package_info().clone();
let body = update.body().unwrap();
// todo(lemarier): We should review this and make sure we have
// something more conventional.
let should_install = tauri::api::dialog::blocking::ask(
parent_window,
format!(r#"A new version of {} is available! "#, package_info.name),
format!(
r#"{} {} is now available -- you have {}.
Would you like to install it now?
Release Notes:
{}"#,
package_info.name,
update.latest_version(),
package_info.version,
body
),
);
if should_install {
// Launch the updater download process.
// On macOS we display the `Ready to Restart` dialog asking to restart.
// On Windows we close the current app and launch the downloaded MSI when ready (the process stops here).
// On Linux we replace the AppImage by launching a new install; it starts a new AppImage instance, so we close the previous one (the process stops here).
update.download_and_install().await?;
// Ask user if we need to restart the application
let should_exit = tauri::api::dialog::blocking::ask(
parent_window,
"Ready to Restart",
"The installation was successful, do you want to restart the application now?",
);
if should_exit {
app.restart();
}
}
Ok(())
}
pub async fn silent_install(app: AppHandle<Wry>, update: UpdateResponse<Wry>) -> Result<()> {
info!("silent_install");
let windows = app.windows();
let parent_window = windows.values().next();
// Launch the updater download process.
// On macOS we display the `Ready to Restart` dialog asking to restart.
// On Windows we close the current app and launch the downloaded MSI when ready (the process stops here).
// On Linux we replace the AppImage by launching a new install; it starts a new AppImage instance, so we close the previous one (the process stops here).
update.download_and_install().await?;
// Ask user if we need to restart the application
let should_exit = tauri::api::dialog::blocking::ask(
parent_window,
"Ready to Restart",
"The silent installation was successful, do you want to restart the application now?",
);
if should_exit {
app.restart();
}
Ok(())
}
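main.rs (above) registers cmd::run_check_update, and General.tsx (further below) calls invoke('run_check_update', { silent: false, hasMsg: true }); the wrapper itself is not included in this diff. A minimal sketch, assuming it only forwards to the helper above, might be:

// Hypothetical sketch — the real cmd::run_check_update is not shown in this diff.
#[tauri::command]
pub fn run_check_update(app: tauri::AppHandle, silent: bool, has_msg: Option<bool>) {
    // `hasMsg` from the frontend maps to has_msg via Tauri's camelCase argument conversion
    crate::utils::run_check_update(app, silent, has_msg);
}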

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -7,22 +7,18 @@
},
"package": {
"productName": "ChatGPT",
"version": "0.6.2"
"version": "0.8.1"
},
"tauri": {
"allowlist": {
"all": true,
"http": {
"all": true,
"scope": [
"https://**",
"http://**"
]
"globalShortcut": {
"all": true
},
"fs": {
"all": true,
"scope": [
"$HOME/**"
"$HOME/.chatgpt/**"
]
}
},
@@ -72,23 +68,11 @@
},
"updater": {
"active": true,
"dialog": true,
"dialog": false,
"endpoints": [
"https://lencx.github.io/ChatGPT/install.json"
],
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK"
},
"windows": [
{
"label": "main",
"url": "index.html",
"title": "ChatGPT",
"visible": false,
"width": 800,
"height": 600,
"minWidth": 800,
"minHeight": 600
}
]
}
}
}


@@ -47,6 +47,7 @@ export function useCacheModel(file = '') {
const list = await invoke('cmd_list');
await writeJSON(CHAT_MODEL_CMD_JSON, { name: 'ChatGPT CMD', last_updated: Date.now(), data: list });
await invoke('window_reload', { label: 'core' });
await invoke('window_reload', { label: 'tray' });
};
return { modelCacheJson, modelCacheSet, modelCacheCmd };
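The hook above reloads both webviews with invoke('window_reload', { label }); the menu handler in menu.rs calls the same command as cmd::window_reload(app, "core"). Its implementation is not part of this diff; a minimal sketch might be:

// Hypothetical sketch — the real cmd::window_reload is not shown in this diff.
#[tauri::command]
pub fn window_reload(app: tauri::AppHandle, label: &str) {
    use tauri::Manager;
    if let Some(win) = app.get_window(label) {
        win.eval("window.location.reload()").unwrap();
    }
}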


@@ -18,8 +18,6 @@ export default function useData(oData: any[]) {
const opInit = (val: any[] = []) => {
if (!val || !Array.isArray(val)) return;
console.log('«20» /src/hooks/useData.ts ~> ', val);
const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
setData(nData);
};

src/hooks/useEvent.ts (vendored, 34 changed lines)

@@ -1,34 +0,0 @@
import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
export default function useEvent() {
const { modelSet } = useChatModel('sync_prompts');
const { modelCacheSet } = useCacheModel();
// Using `emit` and `listen` will be triggered multiple times in development mode.
// So here we use `eval` to call `__sync_prompt`
useInit(() => {
(window as any).__sync_prompts = async () => {
const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
method: 'GET',
responseType: http.ResponseType.Text,
});
const data = (res.data || '') as string;
if (res.ok) {
const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
await modelCacheSet(fmtList, file);
modelSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
dialog.message('ChatGPT Prompts data has been synchronized!');
} else {
dialog.message('ChatGPT Prompts data sync failed, please try again!');
}
}
})
}
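The deleted hook above fetched the prompts CSV in the frontend; after this change, core.js calls invoke('sync_prompts', { time: Date.now() }) instead (see the core.js hunk earlier). The Rust command behind that invoke is not included in this diff; a minimal sketch, assuming it reuses the utils::get_data helper and conf::GITHUB_PROMPTS_CSV_URL shown above, could look like:

// Hypothetical sketch — the real cmd::sync_prompts is not part of this diff.
#[tauri::command]
pub async fn sync_prompts(app: tauri::AppHandle, time: u64) {
    // fetch the prompts CSV with the helper shown in the utils.rs hunk above
    if let Ok(Some(csv)) = crate::utils::get_data(crate::conf::GITHUB_PROMPTS_CSV_URL, Some(&app)).await {
        // parse the CSV into prompt records, cache them under ~/.chatgpt,
        // then reload the core/tray windows so the new /commands show up
        log::info!("sync_prompts: fetched {} bytes at {}", csv.len(), time);
    }
}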

src/layout/index.scss (vendored, 19 changed lines)

@@ -1,10 +1,20 @@
.chat-logo {
text-align: center;
padding: 5px 0;
height: 48px;
img {
width: 48px;
height: 48px;
width: 44px;
height: 44px;
margin-top: 4px;
}
}
.chat-info {
text-align: center;
font-weight: bold;
.ant-tag {
margin: 2px;
}
}
@@ -21,9 +31,6 @@
.ant-menu {
user-select: none;
-webkit-user-select: none;
.ant-menu-item {
background-color: #f8f8f8;
}
}
.ant-layout-footer {

src/layout/index.tsx (vendored, 46 changed lines)

@@ -1,25 +1,39 @@
import { FC, useState } from 'react';
import { Layout, Menu } from 'antd';
import { useState } from 'react';
import {Layout, Menu, Tooltip, ConfigProvider, theme, Tag } from 'antd';
import { SyncOutlined } from '@ant-design/icons';
import { useNavigate, useLocation } from 'react-router-dom';
import { getName, getVersion } from '@tauri-apps/api/app';
import { invoke } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import Routes, { menuItems } from '@/routes';
import './index.scss';
const { Content, Footer, Sider } = Layout;
interface ChatLayoutProps {
children?: React.ReactNode;
}
const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
export default function ChatLayout() {
const [collapsed, setCollapsed] = useState(false);
const [appInfo, setAppInfo] = useState<Record<string, any>>({});
const location = useLocation();
const go = useNavigate();
useInit(async () => {
setAppInfo({
appName: await getName(),
appVersion: await getVersion(),
appTheme: await invoke("get_theme"),
});
})
const checkAppUpdate = async () => {
await invoke('run_check_update', { silent: false, hasMsg: true });
}
return (
<ConfigProvider theme={{algorithm: appInfo.appTheme === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm}}>
<Layout style={{ minHeight: '100vh' }} hasSider>
<Sider
theme="light"
theme={appInfo.appTheme === "dark" ? "dark" : "light"}
collapsible
collapsed={collapsed}
onCollapse={(value) => setCollapsed(value)}
@@ -34,9 +48,20 @@ const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
}}
>
<div className="chat-logo"><img src="/logo.png" /></div>
<div className="chat-info">
<Tag>{appInfo.appName}</Tag>
<Tag>
<span style={{ marginRight: 5 }}>{appInfo.appVersion}</span>
<Tooltip title="click to check update">
<a onClick={checkAppUpdate}><SyncOutlined /></a>
</Tooltip>
</Tag>
</div>
<Menu
defaultSelectedKeys={[location.pathname]}
mode="inline"
theme={ appInfo.appTheme === "dark" ? "dark" : "light" }
inlineIndent={12}
items={menuItems}
defaultOpenKeys={['/model']}
@@ -56,7 +81,6 @@ const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
<a href="https://github.com/lencx/chatgpt" target="_blank">ChatGPT Desktop Application</a> ©2022 Created by lencx</Footer>
</Layout>
</Layout>
</ConfigProvider>
);
};
export default ChatLayout;
};

src/main.scss (vendored, 14 changed lines)

@@ -45,6 +45,12 @@ html, body {
}
}
.chat-table-tip {
> span {
line-height: 16px;
}
}
.chat-sync-path {
font-size: 12px;
font-weight: 500;
@@ -52,6 +58,14 @@ html, body {
margin-bottom: 5px;
line-height: 16px;
> div {
max-width: 400px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
// color: #2a2a2a;
}
span {
display: inline-block;
// background-color: #d8d8d8;

src/main.tsx (vendored, 14 changed lines)

@@ -2,23 +2,15 @@ import { StrictMode, Suspense } from 'react';
import { BrowserRouter } from 'react-router-dom';
import ReactDOM from 'react-dom/client';
import useEvent from '@/hooks/useEvent';
import Layout from '@/layout';
import './main.scss';
const App = () => {
useEvent();
return (
<BrowserRouter>
<Layout/>
</BrowserRouter>
);
}
ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
<StrictMode>
<Suspense fallback={null}>
<App />
<BrowserRouter>
<Layout/>
</BrowserRouter>
</Suspense>
</StrictMode>
);

src/utils.ts (vendored, 22 changed lines)

@@ -1,4 +1,4 @@
import { readTextFile, writeTextFile, exists, createDir, BaseDirectory } from '@tauri-apps/api/fs';
import { readTextFile, writeTextFile, exists, createDir } from '@tauri-apps/api/fs';
import { homeDir, join, dirname } from '@tauri-apps/api/path';
import dayjs from 'dayjs';
@@ -20,10 +20,6 @@ export const chatModelPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_MODEL_JSON);
}
// export const chatModelSyncPath = async (): Promise<string> => {
// return join(await chatRoot(), CHAT_MODEL_SYNC_JSON);
// }
export const chatPromptsPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_PROMPTS_CSV);
}
@@ -32,10 +28,16 @@ type readJSONOpts = { defaultVal?: Record<string, any>, isRoot?: boolean, isList
export const readJSON = async (path: string, opts: readJSONOpts = {}) => {
const { defaultVal = {}, isRoot = false, isList = false } = opts;
const root = await chatRoot();
const file = await join(isRoot ? '' : root, path);
let file = path;
if (!isRoot) {
file = await join(root, path);
}
if (!await exists(file)) {
await createDir(await dirname(file), { recursive: true });
if (await dirname(file) !== root) {
await createDir(await dirname(file), { recursive: true });
}
await writeTextFile(file, isList ? '[]' : JSON.stringify({
name: 'ChatGPT',
link: 'https://github.com/lencx/ChatGPT',
@@ -54,7 +56,11 @@ type writeJSONOpts = { dir?: string, isRoot?: boolean };
export const writeJSON = async (path: string, data: Record<string, any>, opts: writeJSONOpts = {}) => {
const { isRoot = false } = opts;
const root = await chatRoot();
const file = await join(isRoot ? '' : root, path);
let file = path;
if (!isRoot) {
file = await join(root, path);
}
if (isRoot && !await exists(await dirname(file))) {
await createDir(await dirname(file), { recursive: true });

src/view/General.tsx (vendored, 176 changed lines)

@@ -1,36 +1,84 @@
import { useEffect, useState } from 'react';
import { Form, Radio, Switch, Input, Button, Space, message, Tooltip } from 'antd';
import { QuestionCircleOutlined } from '@ant-design/icons';
import { invoke } from '@tauri-apps/api';
import { invoke, shell, path } from '@tauri-apps/api';
import { platform } from '@tauri-apps/api/os';
import { ask } from '@tauri-apps/api/dialog';
import { relaunch } from '@tauri-apps/api/process';
import { clone, omit, isEqual } from 'lodash';
import { DISABLE_AUTO_COMPLETE } from '@/utils';
import useInit from '@/hooks/useInit';
import { DISABLE_AUTO_COMPLETE, chatRoot } from '@/utils';
const AutoUpdateLabel = () => {
return (
<span>
Auto Update <Tooltip title={(
<div>
<div>Auto Update Policy</div>
<span><strong>Prompt</strong>: prompt to install</span><br/>
<span><strong>Silent</strong>: install silently</span><br/>
{/*<span><strong>Disable</strong>: disable auto update</span><br/>*/}
</div>
)}><QuestionCircleOutlined style={{ color: '#1677ff' }} /></Tooltip>
</span>
)
}
const OriginLabel = ({ url }: { url: string }) => {
return (
<span>
Switch Origin <Tooltip title={`Default: ${url}`}><QuestionCircleOutlined /></Tooltip>
Switch Origin <Tooltip title={`Default: ${url}`}><QuestionCircleOutlined style={{ color: '#1677ff' }} /></Tooltip>
</span>
)
}
const PopupSearchLabel = () => {
return (
<span>
Pop-up Search
{' '}
<Tooltip title={(
<div>
<div style={{ marginBottom: 10 }}>Generate images from selected content: select ChatGPT text with the mouse (no more than 400 characters) and the <b>DALL·E 2</b> button appears; click it to jump to DALL·E 2. (Note: because the query filled in by the script cannot trigger the input event directly, you need to type a space in the input box to make the button clickable.)</div>
<div>The application is built with Tauri, and due to its security restrictions some of the action buttons will not work, so we recommend switching to your browser.</div>
</div>
)}><QuestionCircleOutlined style={{ color: '#1677ff' }} /></Tooltip>
</span>
)
}
const GlobalShortcutLabel = () => {
return (
<div>
Global Shortcut
{' '}
<Tooltip title={(
<div>
<div>Shortcut definition: modifiers and key separated by "+", e.g. CmdOrControl+Q</div>
<div style={{ margin: '10px 0'}}>If empty, the shortcut is disabled.</div>
<a href="https://tauri.app/v1/api/js/globalshortcut" target="_blank">https://tauri.app/v1/api/js/globalshortcut</a>
</div>
)}>
<QuestionCircleOutlined style={{ color: '#1677ff' }} />
</Tooltip>
</div>
)
}
export default function General() {
const [form] = Form.useForm();
const [jsonPath, setJsonPath] = useState('');
const [platformInfo, setPlatform] = useState<string>('');
const [chatConf, setChatConf] = useState<any>(null);
const init = async () => {
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'chat.conf.json'));
setPlatform(await platform());
const chatData = await invoke('get_chat_conf');
setChatConf(chatData);
}
useEffect(() => {
init();
}, [])
});
useEffect(() => {
form.setFieldsValue(clone(chatConf));
@@ -40,6 +88,19 @@ export default function General() {
form.setFieldsValue(chatConf);
};
const onReset = async () => {
const chatData = await invoke('reset_chat_conf');
setChatConf(chatData);
const isOk = await ask(`Configuration reset successfully. Restart the application now?`, {
title: 'ChatGPT Preferences'
});
if (isOk) {
relaunch();
return;
}
message.success('Configuration reset successfully');
};
const onFinish = async (values: any) => {
if (!isEqual(omit(chatConf, ['default_origin']), values)) {
await invoke('form_confirm', { data: values, label: 'main' });
@@ -55,44 +116,67 @@ export default function General() {
};
return (
<Form
form={form}
style={{ maxWidth: 500 }}
onFinish={onFinish}
labelCol={{ span: 8 }}
wrapperCol={{ span: 15, offset: 1 }}
>
<Form.Item label="Theme" name="theme">
<Radio.Group>
<Radio value="Light">Light</Radio>
<Radio value="Dark">Dark</Radio>
</Radio.Group>
</Form.Item>
<Form.Item label="Stay On Top" name="stay_on_top" valuePropName="checked">
<Switch />
</Form.Item>
{platformInfo === 'darwin' && (
<Form.Item label="Titlebar" name="titlebar" valuePropName="checked">
<>
<div className="chat-table-tip">
<div className="chat-sync-path">
<div>PATH: <a onClick={() => shell.open(jsonPath)} title={jsonPath}>{jsonPath}</a></div>
</div>
</div>
<Form
form={form}
style={{ maxWidth: 500 }}
onFinish={onFinish}
labelCol={{ span: 8 }}
wrapperCol={{ span: 15, offset: 1 }}
>
<Form.Item label="Stay On Top" name="stay_on_top" valuePropName="checked">
<Switch />
</Form.Item>
)}
<Form.Item label={<OriginLabel url={chatConf?.default_origin} />} name="origin">
<Input placeholder="https://chat.openai.com" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
<Form.Item label="User Agent (Window)" name="ua_window">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item label="User Agent (SystemTray)" name="ua_tray">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item>
<Space size={20}>
<Button onClick={onCancel}>Cancel</Button>
<Button type="primary" htmlType="submit">
Submit
</Button>
</Space>
</Form.Item>
</Form>
{platformInfo === 'darwin' && (
<Form.Item label="Titlebar" name="titlebar" valuePropName="checked">
<Switch />
</Form.Item>
)}
<Form.Item label={<PopupSearchLabel />} name="popup_search" valuePropName="checked">
<Switch />
</Form.Item>
<Form.Item label="Theme" name="theme">
<Radio.Group>
<Radio value="Light">Light</Radio>
<Radio value="Dark">Dark</Radio>
{["darwin", "windows"].includes(platformInfo) && (
<Radio value="System">System</Radio>
)}
</Radio.Group>
</Form.Item>
<Form.Item label={<AutoUpdateLabel />} name="auto_update">
<Radio.Group>
<Radio value="Prompt">Prompt</Radio>
<Radio value="Silent">Silent</Radio>
{/*<Radio value="Disable">Disable</Radio>*/}
</Radio.Group>
</Form.Item>
<Form.Item label={<GlobalShortcutLabel />} name="global_shortcut">
<Input placeholder="CmdOrCtrl+Shift+O" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
<Form.Item label={<OriginLabel url={chatConf?.default_origin} />} name="origin">
<Input placeholder="https://chat.openai.com" {...DISABLE_AUTO_COMPLETE} />
</Form.Item>
<Form.Item label="User Agent (Window)" name="ua_window">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item label="User Agent (SystemTray)" name="ua_tray">
<Input.TextArea autoSize={{ minRows: 4, maxRows: 4 }} {...DISABLE_AUTO_COMPLETE} placeholder="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36" />
</Form.Item>
<Form.Item>
<Space size={20}>
<Button onClick={onCancel}>Cancel</Button>
<Button type="primary" htmlType="submit">Submit</Button>
<Button type="dashed" onClick={onReset}>Reset to defaults</Button>
</Space>
</Form.Item>
</Form>
</>
)
}

View File

@@ -8,6 +8,7 @@ import useInit from '@/hooks/useInit';
interface SyncFormProps {
record?: Record<string|symbol, any> | null;
type: string;
}
const initFormValue = {
@@ -17,7 +18,8 @@ const initFormValue = {
prompt: '',
};
const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }, ref) => {
const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record, type }, ref) => {
const isDisabled = type === 'edit';
const [form] = Form.useForm();
useImperativeHandle(ref, () => ({ form }));
const [root, setRoot] = useState('');
@@ -34,7 +36,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
const pathOptions = (
<Form.Item noStyle name="protocol" initialValue="https">
<Select>
<Select disabled={isDisabled}>
<Select.Option value="local">{root}</Select.Option>
<Select.Option value="http">http://</Select.Option>
<Select.Option value="https">https://</Select.Option>
@@ -43,7 +45,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
);
const extOptions = (
<Form.Item noStyle name="ext" initialValue="json">
<Select>
<Select disabled={isDisabled}>
<Select.Option value="csv">.csv</Select.Option>
<Select.Option value="json">.json</Select.Option>
</Select>
@@ -90,8 +92,13 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
label="PATH"
name="path"
rules={[{ required: true, message: 'Please input path!' }]}
>
<Input placeholder="YOUR_PATH" addonBefore={pathOptions} addonAfter={extOptions} {...DISABLE_AUTO_COMPLETE} />
>
<Input
placeholder="YOUR_PATH"
addonBefore={pathOptions}
addonAfter={extOptions}
{...DISABLE_AUTO_COMPLETE}
/>
</Form.Item>
<Form.Item style={{ display: 'none' }} name="id" initialValue={v4().replace(/-/g, '')}><input /></Form.Item>
</Form>

View File

@@ -34,7 +34,7 @@ export const syncColumns = () => [
key: 'last_updated',
width: 140,
render: (v: number) => (
<div style={{ textAlign: 'center' }}>
<div>
<HistoryOutlined style={{ marginRight: 5, color: v ? '#52c41a' : '#ff4d4f' }} />
{ v ? fmtDate(v) : ''}
</div>
@@ -47,7 +47,15 @@ export const syncColumns = () => [
render: (_: any, row: any, actions: any) => {
return (
<Space>
<a onClick={() => actions.setRecord(row, 'sync')}>Sync</a>
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
onConfirm={() => actions.setRecord(row, 'sync')}
okText="Yes"
cancelText="No"
>
<a>Sync</a>
</Popconfirm>
{row.last_updated && <Link to={`${row.id}`} state={row}>View</Link>}
<a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
<Popconfirm

View File

@@ -1,6 +1,6 @@
import { useState, useRef, useEffect } from 'react';
import { Table, Modal, Button, message } from 'antd';
import { invoke, http, path, fs } from '@tauri-apps/api';
import { invoke, path, fs } from '@tauri-apps/api';
import useData from '@/hooks/useData';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
@@ -10,7 +10,7 @@ import { CHAT_MODEL_JSON, chatRoot, readJSON, genCmd } from '@/utils';
import { syncColumns, getPath } from './config';
import SyncForm from './Form';
const setTag = (data: Record<string, any>[]) => data.map((i) => ({ ...i, tags: ['user-sync'], enable: true }))
const fmtData = (data: Record<string, any>[] = []) => (Array.isArray(data) ? data : []).map((i) => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), tags: ['user-sync'], enable: true }));
export default function SyncCustom() {
const [isVisible, setVisible] = useState(false);
@@ -34,7 +34,9 @@ export default function SyncCustom() {
if (!opInfo.opType) return;
if (opInfo.opType === 'sync') {
const filename = `${opInfo?.opRecord?.id}.json`;
handleSync(filename).then(() => {
handleSync(filename).then((isOk: boolean) => {
opInfo.resetRecord();
if (!isOk) return;
const data = opReplace(opInfo?.opRecord?.[opSafeKey], { ...opInfo?.opRecord, last_updated: Date.now() });
modelSet(data);
opInfo.resetRecord();
@@ -44,9 +46,16 @@ export default function SyncCustom() {
setVisible(true);
}
if (['delete'].includes(opInfo.opType)) {
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelSet(data);
opInfo.resetRecord();
(async () => {
try {
const file = await path.join(await chatRoot(), 'cache_model', `${opInfo?.opRecord?.id}.json`);
await fs.removeFile(file);
} catch (e) { /* ignore: the cached file may not exist yet */ }
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelSet(data);
opInfo.resetRecord();
modelCacheCmd();
})();
}
}, [opInfo.opType, formRef]);
@@ -58,40 +67,30 @@ export default function SyncCustom() {
// https or http
if (/^http/.test(record?.protocol)) {
const res = await http.fetch(filePath, {
method: 'GET',
responseType: isJson ? 1 : 2,
});
if (res.ok) {
if (isJson) {
// parse json
await modelCacheSet(setTag(Array.isArray(res?.data) ? res?.data : []), file);
} else {
// parse csv
const list: Record<string, string>[] = await invoke('parse_prompt', { data: res?.data });
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
await modelCacheSet(fmtList, file);
}
const data = await invoke('sync_user_prompts', { url: filePath, dataType: record?.ext });
if (data) {
await modelCacheSet(data as [], file);
await modelCacheCmd();
message.success('ChatGPT Prompts data has been synchronized!');
return true;
} else {
message.error('ChatGPT Prompts data sync failed, please try again!');
return false;
}
return;
}
// local
if (isJson) {
// parse json
const data = await readJSON(filePath, { isRoot: true });
await modelCacheSet(setTag(Array.isArray(data) ? data : []), file);
await modelCacheSet(fmtData(data), file);
} else {
// parse csv
const data = await fs.readTextFile(filePath);
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['user-sync'] }));
await modelCacheSet(fmtList, file);
await modelCacheSet(fmtData(list), file);
}
await modelCacheCmd();
return true;
};
const handleOk = () => {
@@ -128,12 +127,12 @@ export default function SyncCustom() {
<Modal
open={isVisible}
onCancel={hide}
title="Model PATH"
title="Sync PATH"
onOk={handleOk}
destroyOnClose
maskClosable={false}
>
<SyncForm ref={formRef} record={opInfo?.opRecord} />
<SyncForm ref={formRef} record={opInfo?.opRecord} type={opInfo.opType} />
</Modal>
</div>
)

View File

@@ -1,13 +1,13 @@
import { useEffect, useState } from 'react';
import { Table, Button, message, Popconfirm } from 'antd';
import { invoke, http, path, shell } from '@tauri-apps/api';
import { Table, Button, Popconfirm } from 'antd';
import { invoke, path, shell } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useTable, { TABLE_PAGINATION } from '@/hooks/useTable';
import { fmtDate, chatRoot, GITHUB_PROMPTS_CSV_URL, genCmd } from '@/utils';
import { fmtDate, chatRoot } from '@/utils';
import { syncColumns } from './config';
import './index.scss';
@@ -33,24 +33,13 @@ export default function SyncPrompts() {
}, [modelCacheJson.length]);
const handleSync = async () => {
const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
method: 'GET',
responseType: http.ResponseType.Text,
});
const data = (res.data || '') as string;
if (res.ok) {
// const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
await modelCacheSet(fmtList);
opInit(fmtList);
const data = await invoke('sync_prompts', { time: Date.now() });
if (data) {
opInit(data as any[]);
modelSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
message.success('ChatGPT Prompts data has been synchronized!');
} else {
message.error('ChatGPT Prompts data sync failed, please try again!');
}
};
@@ -69,6 +58,16 @@ export default function SyncPrompts() {
return (
<div>
<div className="chat-table-btns">
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
placement="topLeft"
onConfirm={handleSync}
okText="Yes"
cancelText="No"
>
<Button type="primary">Sync</Button>
</Popconfirm>
<div>
{selectedItems.length > 0 && (
<>
@@ -78,15 +77,6 @@ export default function SyncPrompts() {
</>
)}
</div>
<Popconfirm
title={<span>Data sync will enable all prompts,<br/>are you sure you want to sync?</span>}
placement="topLeft"
onConfirm={handleSync}
okText="Yes"
cancelText="No"
>
<Button type="primary">Sync</Button>
</Popconfirm>
</div>
<div className="chat-table-tip">
<div className="chat-sync-path">

View File

@@ -10,7 +10,7 @@ export const syncColumns = () => [
// width: 120,
key: 'cmd',
render: (_: string, row: Record<string, string>) => (
<Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
<Tag color="#2a2a2a">/{row.cmd ? row.cmd : genCmd(row.act)}</Tag>
),
},
{
@@ -24,7 +24,9 @@ export const syncColumns = () => [
dataIndex: 'tags',
key: 'tags',
// width: 150,
render: () => <Tag>chatgpt-prompts</Tag>,
render: (v: string[]) => (
<span className="chat-prompts-tags">{v?.map(i => <Tag key={i}>{i}</Tag>)}</span>
),
},
{
title: 'Enable',