feat: chatgpt-prompts sync

lencx
2022-12-19 02:56:53 +08:00
parent 02fb4dd3b7
commit c54aec88c0
21 changed files with 367 additions and 23 deletions

View File

@@ -71,3 +71,20 @@ pub fn get_chat_model() -> serde_json::Value {
    let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
    serde_json::from_str(&content).unwrap()
}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct PromptRecord {
+    pub act: String,
+    pub prompt: String,
+}
+
+#[command]
+pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
+    let mut rdr = csv::Reader::from_reader(data.as_bytes());
+    let mut list = vec![];
+    for result in rdr.deserialize() {
+        let record: PromptRecord = result.unwrap();
+        list.push(record);
+    }
+    list
+}

View File

@@ -0,0 +1,123 @@
// https://github.com/tauri-apps/tauri-plugin-fs-extra/blob/dev/src/lib.rs
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use serde::{ser::Serializer, Serialize};
use std::{
    path::PathBuf,
    time::{SystemTime, UNIX_EPOCH},
};
use tauri::command;

#[cfg(unix)]
use std::os::unix::fs::{MetadataExt, PermissionsExt};
#[cfg(windows)]
use std::os::windows::fs::MetadataExt;

type Result<T> = std::result::Result<T, Error>;

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
}
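
// Command return values (including errors) are serialized and sent to the
// webview, so the error type must implement `Serialize`; it crosses the
// IPC boundary as its display string.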
impl Serialize for Error {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Permissions {
    readonly: bool,
    #[cfg(unix)]
    mode: u32,
}

#[cfg(unix)]
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct UnixMetadata {
    dev: u64,
    ino: u64,
    mode: u32,
    nlink: u64,
    uid: u32,
    gid: u32,
    rdev: u64,
    blksize: u64,
    blocks: u64,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    accessed_at_ms: u64,
    created_at_ms: u64,
    modified_at_ms: u64,
    is_dir: bool,
    is_file: bool,
    is_symlink: bool,
    size: u64,
    permissions: Permissions,
    #[cfg(unix)]
    #[serde(flatten)]
    unix: UnixMetadata,
    #[cfg(windows)]
    file_attributes: u32,
}
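
// Timestamps are reported as Unix-epoch milliseconds; a timestamp the
// platform or filesystem cannot supply (commonly `created()`) falls back
// to 0 via `unwrap_or_default`.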
fn system_time_to_ms(time: std::io::Result<SystemTime>) -> u64 {
    time.map(|t| {
        let duration_since_epoch = t.duration_since(UNIX_EPOCH).unwrap();
        duration_since_epoch.as_millis() as u64
    })
    .unwrap_or_default()
}

#[command]
pub async fn metadata(path: PathBuf) -> Result<Metadata> {
    let metadata = std::fs::metadata(path)?;
    let file_type = metadata.file_type();
    let permissions = metadata.permissions();
    Ok(Metadata {
        accessed_at_ms: system_time_to_ms(metadata.accessed()),
        created_at_ms: system_time_to_ms(metadata.created()),
        modified_at_ms: system_time_to_ms(metadata.modified()),
        is_dir: file_type.is_dir(),
        is_file: file_type.is_file(),
        is_symlink: file_type.is_symlink(),
        size: metadata.len(),
        permissions: Permissions {
            readonly: permissions.readonly(),
            #[cfg(unix)]
            mode: permissions.mode(),
        },
        #[cfg(unix)]
        unix: UnixMetadata {
            dev: metadata.dev(),
            ino: metadata.ino(),
            mode: metadata.mode(),
            nlink: metadata.nlink(),
            uid: metadata.uid(),
            gid: metadata.gid(),
            rdev: metadata.rdev(),
            blksize: metadata.blksize(),
            blocks: metadata.blocks(),
        },
        #[cfg(windows)]
        file_attributes: metadata.file_attributes(),
    })
}

// #[command]
// pub async fn exists(path: PathBuf) -> bool {
// path.exists()
// }
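
Once registered (see the main.rs diff below), `metadata` can be invoked from the renderer. A rough sketch, assuming Tauri v1's `invoke` and an illustrative path; because of `rename_all = "camelCase"` and the `#[serde(flatten)]` on the Unix block, all fields arrive camelCased at the top level of the payload:

import { invoke } from '@tauri-apps/api/tauri';

// Illustrative path; any readable file or directory works.
const meta = await invoke('metadata', { path: '/tmp/example.txt' });
console.log(meta.isFile, meta.size, meta.modifiedAtMs);
// On Unix, flattened fields such as uid, gid and blksize sit alongside these.
// On failure the promise rejects with the serialized io::Error string.

One quirk worth knowing: `std::fs::metadata` follows symlinks, so `isSymlink` is effectively always `false` here; `symlink_metadata` would be needed to report on links themselves.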

View File

@@ -1,4 +1,5 @@
pub mod cmd;
+pub mod fs_extra;
pub mod menu;
pub mod setup;
pub mod window;

View File

@@ -62,8 +62,10 @@ function init() {
async function cmdTip() {
  const chatModelJson = await invoke('get_chat_model') || {};
-  if (!chatModelJson.data && chatModelJson.data.length <= 0) return;
-  const data = chatModelJson.data || [];
+  const user_custom = chatModelJson.user_custom || [];
+  const sys_sync_prompts = chatModelJson.sys_sync_prompts || [];
+  const data = [...user_custom, ...sys_sync_prompts];
+  if (data.length <= 0) return;

  const modelDom = document.createElement('div');
  modelDom.classList.add('chat-model-cmd-list');
@@ -74,7 +76,7 @@ async function cmdTip() {
  }
  document.querySelector('form').appendChild(modelDom);

-  const itemDom = (v) => `<div class="cmd-item" data-prompt="${encodeURIComponent(v.prompt)}"><b>/${v.cmd}</b><i>${v.act}</i></div>`;
+  const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;
  const searchInput = document.querySelector('form textarea');
  // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.
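
For reference, a sketch of the chat model JSON shape the new `cmdTip` logic assumes; the keys come from the diff above, the values are made up:

// Shape consumed by cmdTip after this change (values are illustrative):
const chatModelJson = {
  user_custom: [
    { cmd: 'my_cmd', act: 'My Command', prompt: 'You are ...' },
  ],
  sys_sync_prompts: [
    { cmd: 'linux_terminal', act: 'Linux Terminal', prompt: 'I want you to act as a linux terminal.' },
  ],
};

// User-defined commands are spread first, so they are listed ahead of the
// synced prompts; nothing here de-duplicates a `cmd` that appears in both.
const data = [...(chatModelJson.user_custom || []), ...(chatModelJson.sys_sync_prompts || [])];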

View File

@@ -7,7 +7,7 @@ mod app;
mod conf;
mod utils;

-use app::{cmd, menu, setup};
+use app::{cmd, fs_extra, menu, setup};
use conf::{ChatConfJson, ChatState};
use tauri::api::path;
use tauri_plugin_log::{fern::colors::ColoredLevelConfig, LogTarget, LoggerBuilder};
@@ -22,6 +22,7 @@ fn main() {
        // https://github.com/tauri-apps/tauri/pull/2736
        .plugin(
            LoggerBuilder::new()
+                // .level(log::LevelFilter::Error)
                .with_colors(colors)
                .targets([
                    // LogTarget::LogDir,
@@ -44,6 +45,8 @@ fn main() {
            cmd::form_msg,
            cmd::open_file,
            cmd::get_chat_model,
+            cmd::parse_prompt,
+            fs_extra::metadata,
        ])
        .setup(setup::init)
        .plugin(tauri_plugin_positioner::init())
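
Taken together, the registrations wire up the sync flow this commit is named for: the frontend fetches the prompts CSV, hands it to `parse_prompt`, and the resulting records end up in the chat model JSON that `cmdTip` renders. A hypothetical end-to-end sketch; the fetch URL and the persistence of the result as `sys_sync_prompts` (with the `cmd` slug the UI expects) are assumptions, not shown in this diff:

import { invoke } from '@tauri-apps/api/tauri';

// Hypothetical source URL; the real sync target is a chatgpt-prompts CSV.
const res = await fetch('https://example.com/prompts.csv');
const records = await invoke('parse_prompt', { data: await res.text() });

// Somewhere outside this diff the records are persisted as
// `sys_sync_prompts`, which cmdTip merges with `user_custom` (see above).
console.log(`synced ${records.length} prompts`);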