Merge remote-tracking branch 'origin/main'

denislov 2025-04-23 17:02:37 +08:00
commit 55fdcfe55f
345 changed files with 67541 additions and 9568 deletions


@ -18,7 +18,9 @@ body:
options:
- label: 我理解 Issue 是用于反馈和解决问题的,而非吐槽评论区,将尽可能提供更多信息帮助问题解决。
required: true
- label: 我已经查看了置顶 Issue 并搜索了现有的 [开放Issue](https://github.com/CherryHQ/cherry-studio/issues)和[已关闭Issue](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20),没有找到类似的问题。
- label: 我的问题不是 [常见问题](https://github.com/CherryHQ/cherry-studio/issues/3881) 中的内容。
required: true
- label: 我已经查看了 **置顶 Issue** 并搜索了现有的 [开放Issue](https://github.com/CherryHQ/cherry-studio/issues)和[已关闭Issue](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20),没有找到类似的问题。
required: true
- label: 我填写了简短且清晰明确的标题,以便开发者在翻阅 Issue 列表时能快速确定大致问题。而不是“一个建议”、“卡住了”等。
required: true
@ -48,8 +50,8 @@ body:
id: description
attributes:
label: 错误描述
description: 描述问题时请尽可能详细
placeholder: 告诉我们发生了什么...
description: 描述问题时请尽可能详细。请尽可能提供截图或屏幕录制,以帮助我们更好地理解问题。
placeholder: 告诉我们发生了什么...(记得附上截图/录屏,如果适用)
validations:
required: true
@ -57,12 +59,14 @@ body:
id: reproduction
attributes:
label: 重现步骤
description: 提供详细的重现步骤,以便于我们可以准确地重现问题
description: 提供详细的重现步骤,以便于我们的开发人员可以准确地重现问题。请尽可能为每个步骤提供截图或屏幕录制。
placeholder: |
1. 转到 '...'
2. 点击 '....'
3. 向下滚动到 '....'
4. 看到错误
记得尽可能为每个步骤附上截图/录屏!
validations:
required: true


@ -6,8 +6,8 @@ body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out this bug report!
Before submitting this issue, please make sure that you have understood the [FAQ](https://docs.cherry-ai.com/question-contact/questions) and [Knowledge Science](https://docs.cherry-ai.com/question-contact/knowledge)
Thank you for taking the time to fill out this bug report!
Before submitting this issue, please make sure that you have understood the [FAQ](https://docs.cherry-ai.com/question-contact/questions) and [Knowledge Science](https://docs.cherry-ai.com/question-contact/knowledge)
- type: checkboxes
id: checklist
@ -18,7 +18,9 @@ body:
options:
- label: I understand that issues are for feedback and problem solving, not for complaining in the comment section, and will provide as much information as possible to help solve the problem.
required: true
- label: I've looked at pinned issues and searched for existing [Open Issues](https://github.com/CherryHQ/cherry-studio/issues), [Closed Issues](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed), and [Discussions](https://github.com/CherryHQ/cherry-studio/discussions), no similar issue or discussion was found.
- label: My issue is not listed in the [FAQ](https://github.com/CherryHQ/cherry-studio/issues/3881).
required: true
- label: I've looked at **pinned issues** and searched for existing [Open Issues](https://github.com/CherryHQ/cherry-studio/issues), [Closed Issues](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed), and [Discussions](https://github.com/CherryHQ/cherry-studio/discussions), no similar issue or discussion was found.
required: true
- label: I've filled in short, clear headings so that developers can quickly identify a rough idea of what to expect when flipping through the list of issues. And not "a suggestion", "stuck", etc.
required: true

.github/workflows/issue-management.yml

@ -0,0 +1,39 @@
name: "Stale Issue Management"
on:
schedule:
- cron: "0 0 * * *"
workflow_dispatch:
env:
daysBeforeStale: 30 # Number of days of inactivity before marking as stale
daysBeforeClose: 30 # Number of days to wait after marking as stale before closing
jobs:
stale:
if: github.repository_owner == 'CherryHQ'
runs-on: ubuntu-latest
permissions:
actions: write # Workaround for https://github.com/actions/stale/issues/1090
issues: write
# Completely disable stalling for PRs
pull-requests: none
contents: none
steps:
- name: Close inactive issues
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: ${{ env.daysBeforeClose }}
stale-issue-label: "inactive"
stale-issue-message: |
This issue has been inactive for a prolonged period and will be closed automatically in ${{ env.daysBeforeClose }} days.
该问题已长时间处于闲置状态,${{ env.daysBeforeClose }} 天后将自动关闭。
exempt-issue-labels: "pending, Dev Team, enhancement"
days-before-pr-stale: -1 # Completely disable stalling for PRs
days-before-pr-close: -1 # Completely disable closing for PRs
# Temporary to reduce the huge issues number
operations-per-run: 100
debug-only: false


@ -76,7 +76,10 @@ jobs:
- name: Build Windows
if: matrix.os == 'windows-latest'
run: yarn build:win
run: |
yarn build:npm windows
yarn build:win:x64
yarn build:win:arm64
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}


@ -6,7 +6,7 @@ on:
tag:
description: 'Release tag (e.g. v1.0.0)'
required: true
default: 'v0.9.18'
default: 'v1.0.0'
push:
tags:
- v*.*.*
@ -42,6 +42,11 @@ jobs:
with:
node-version: 20
- name: macos-latest dependencies fix
if: matrix.os == 'macos-latest'
run: |
brew install python-setuptools
- name: Install corepack
run: corepack enable && corepack prepare yarn@4.6.0 --activate
@ -75,6 +80,7 @@ jobs:
- name: Build Mac
if: matrix.os == 'macos-latest'
run: |
sudo -H pip install setuptools
yarn build:npm mac
yarn build:mac
env:
@ -88,14 +94,13 @@ jobs:
- name: Build Windows
if: matrix.os == 'windows-latest'
run: yarn build:win
run: |
yarn build:npm windows
yarn build:win
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
- name: Replace spaces in filenames
run: node scripts/replace-spaces.js
- name: Release
uses: ncipollo/release-action@v1
with:

.gitignore

@ -35,7 +35,6 @@ Thumbs.db
node_modules
dist
out
build/icons
stats.html
# ENV
@ -47,3 +46,11 @@ local
.aider*
.cursorrules
.cursor/rules
# test
coverage
.vitest-cache
vitest.config.*.timestamp-*
# Sentry Config File
.env.sentry-build-plugin

.vscode/settings.json

@ -31,5 +31,13 @@
"[markdown]": {
"files.trimTrailingWhitespace": false
},
"i18n-ally.localesPaths": ["src/renderer/src/i18n"]
"i18n-ally.localesPaths": ["src/renderer/src/i18n/locales"],
"i18n-ally.enabledFrameworks": ["react-i18next", "i18next"],
"i18n-ally.keystyle": "nested", //
"i18n-ally.sortKeys": true, //
"i18n-ally.namespace": true, //
"i18n-ally.enabledParsers": ["ts", "js", "json"], //
"i18n-ally.sourceLanguage": "en-us", //
"i18n-ally.displayLanguage": "zh-cn",
"i18n-ally.fullReloadOnChanged": true //
}

File diff suppressed because one or more lines are too long


@ -0,0 +1,92 @@
diff --git a/out/electron/ElectronFramework.js b/out/electron/ElectronFramework.js
index 5a4b4546870ee9e770d5a50d79790d39baabd268..3f0ac05dfd6bbaeaf5f834341a823718bd10f55c 100644
--- a/out/electron/ElectronFramework.js
+++ b/out/electron/ElectronFramework.js
@@ -55,26 +55,27 @@ async function removeUnusedLanguagesIfNeeded(options) {
if (!wantedLanguages.length) {
return;
}
- const { dir, langFileExt } = getLocalesConfig(options);
+ const { dirs, langFileExt } = getLocalesConfig(options);
// noinspection SpellCheckingInspection
- await (0, tiny_async_pool_1.default)(builder_util_1.MAX_FILE_REQUESTS, await (0, fs_extra_1.readdir)(dir), async (file) => {
- if (!file.endsWith(langFileExt)) {
+ const deletedFiles = async (dir) => {
+ await (0, tiny_async_pool_1.default)(builder_util_1.MAX_FILE_REQUESTS, await (0, fs_extra_1.readdir)(dir), async (file) => {
+ if (!file.endsWith(langFileExt)) {
+ return;
+ }
+ const language = file.substring(0, file.length - langFileExt.length);
+ if (!wantedLanguages.includes(language)) {
+ return fs.rm(path.join(dir, file), { recursive: true, force: true });
+ }
return;
- }
- const language = file.substring(0, file.length - langFileExt.length);
- if (!wantedLanguages.includes(language)) {
- return fs.rm(path.join(dir, file), { recursive: true, force: true });
- }
- return;
- });
+ });
+ };
+ await Promise.all(dirs.map(deletedFiles));
function getLocalesConfig(options) {
const { appOutDir, packager } = options;
if (packager.platform === index_1.Platform.MAC) {
- return { dir: packager.getResourcesDir(appOutDir), langFileExt: ".lproj" };
- }
- else {
- return { dir: path.join(packager.getResourcesDir(appOutDir), "..", "locales"), langFileExt: ".pak" };
+ return { dirs: [packager.getResourcesDir(appOutDir), packager.getMacOsElectronFrameworkResourcesDir(appOutDir)], langFileExt: ".lproj" };
}
+ return { dirs: [path.join(packager.getResourcesDir(appOutDir), "..", "locales")], langFileExt: ".pak" };
}
}
class ElectronFramework {
diff --git a/out/node-module-collector/index.d.ts b/out/node-module-collector/index.d.ts
index 8e808be0fa0d5971b9f9605c8eb88f71630e34b7..1b97dccd8a150a67c4312d2ba4757960e624045b 100644
--- a/out/node-module-collector/index.d.ts
+++ b/out/node-module-collector/index.d.ts
@@ -2,6 +2,6 @@ import { NpmNodeModulesCollector } from "./npmNodeModulesCollector";
import { PnpmNodeModulesCollector } from "./pnpmNodeModulesCollector";
import { detect, PM, getPackageManagerVersion } from "./packageManager";
import { NodeModuleInfo } from "./types";
-export declare function getCollectorByPackageManager(rootDir: string): Promise<NpmNodeModulesCollector | PnpmNodeModulesCollector>;
+export declare function getCollectorByPackageManager(rootDir: string): Promise<PnpmNodeModulesCollector | NpmNodeModulesCollector>;
export declare function getNodeModules(rootDir: string): Promise<NodeModuleInfo[]>;
export { detect, getPackageManagerVersion, PM };
diff --git a/out/platformPackager.d.ts b/out/platformPackager.d.ts
index 2df1ba2725c54c7b0e8fed67ab52e94f0cdb17bc..c7ff756564cfd216d2c7d8f72f367527010c06f9 100644
--- a/out/platformPackager.d.ts
+++ b/out/platformPackager.d.ts
@@ -67,6 +67,7 @@ export declare abstract class PlatformPackager<DC extends PlatformSpecificBuildO
getElectronSrcDir(dist: string): string;
getElectronDestinationDir(appOutDir: string): string;
getResourcesDir(appOutDir: string): string;
+ getMacOsElectronFrameworkResourcesDir(appOutDir: string): string;
getMacOsResourcesDir(appOutDir: string): string;
private checkFileInPackage;
private sanityCheckPackage;
diff --git a/out/platformPackager.js b/out/platformPackager.js
index 6f799ce0d1cdb5f0b18a9c8187b2db84b3567aa9..879248e6c6786d3473e1a80e3930d3a8d0190aab 100644
--- a/out/platformPackager.js
+++ b/out/platformPackager.js
@@ -465,12 +465,13 @@ class PlatformPackager {
if (this.platform === index_1.Platform.MAC) {
return this.getMacOsResourcesDir(appOutDir);
}
- else if ((0, Framework_1.isElectronBased)(this.info.framework)) {
+ if ((0, Framework_1.isElectronBased)(this.info.framework)) {
return path.join(appOutDir, "resources");
}
- else {
- return appOutDir;
- }
+ return appOutDir;
+ }
+ getMacOsElectronFrameworkResourcesDir(appOutDir) {
+ return path.join(appOutDir, `${this.appInfo.productFilename}.app`, "Contents", "Frameworks", "Electron Framework.framework", "Resources");
}
getMacOsResourcesDir(appOutDir) {
return path.join(appOutDir, `${this.appInfo.productFilename}.app`, "Contents", "Resources");


@ -0,0 +1,17 @@
diff --git a/index.js b/index.js
index 4e8423491ab51a9eb9fee22182e4ea0fcc3d3d3b..2846c5d4354c130d478dc99565b3ecd6d85b7d2e 100644
--- a/index.js
+++ b/index.js
@@ -19,7 +19,11 @@ function requireNative() {
break;
}
}
- return require(`@libsql/${target}`);
+ if (target === "win32-arm64-msvc") {
+ return require(`@strongtz/win32-arm64-msvc`);
+ } else {
+ return require(`@libsql/${target}`);
+ }
}
const {


@ -1,8 +1,8 @@
diff --git a/core.js b/core.js
index e75a18281ce8f051990c5a50bc1076afdddf91a3..e62f796791a155f23d054e74a429516c14d6e11b 100644
index ebb071d31cd5a14792b62814df072c5971e83300..31e1062d4a7f2422ffec79cf96a35dbb69fe89cb 100644
--- a/core.js
+++ b/core.js
@@ -156,7 +156,7 @@ class APIClient {
@@ -157,7 +157,7 @@ class APIClient {
Accept: 'application/json',
'Content-Type': 'application/json',
'User-Agent': this.getUserAgent(),
@ -12,10 +12,10 @@ index e75a18281ce8f051990c5a50bc1076afdddf91a3..e62f796791a155f23d054e74a429516c
};
}
diff --git a/core.mjs b/core.mjs
index fcef58eb502664c41a77483a00db8adaf29b2817..18c5d6ed4be86b3640931277bdc27700006764d7 100644
index 9c1a0264dcd73a85de1cf81df4efab9ce9ee2ab7..33f9f1f237f2eb2667a05dae1a7e3dc916f6bfff 100644
--- a/core.mjs
+++ b/core.mjs
@@ -149,7 +149,7 @@ export class APIClient {
@@ -150,7 +150,7 @@ export class APIClient {
Accept: 'application/json',
'Content-Type': 'application/json',
'User-Agent': this.getUserAgent(),


@ -0,0 +1,32 @@
diff --git a/dist/index.js b/dist/index.js
index 663919ac5bb4f9147c5c1b09bd2e379586266a4b..88ff8873ac5beb5eb293f7e741a92fb15b00960c 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -20,21 +20,21 @@ function getSystemProxy() {
else if (process.platform === 'darwin') {
const proxySettings = yield mac_system_proxy_1.getMacSystemProxy();
const noProxy = proxySettings.ExceptionsList || [];
- if (proxySettings.HTTPSEnable && proxySettings.HTTPSProxy && proxySettings.HTTPSPort) {
+ if (proxySettings.HTTPEnable && proxySettings.HTTPProxy && proxySettings.HTTPPort) {
return {
- proxyUrl: `https://${proxySettings.HTTPSProxy}:${proxySettings.HTTPSPort}`,
+ proxyUrl: `http://${proxySettings.HTTPProxy}:${proxySettings.HTTPPort}`,
noProxy
};
}
- else if (proxySettings.HTTPEnable && proxySettings.HTTPProxy && proxySettings.HTTPPort) {
+ else if (proxySettings.SOCKSEnable && proxySettings.SOCKSProxy && proxySettings.SOCKSPort) {
return {
- proxyUrl: `http://${proxySettings.HTTPProxy}:${proxySettings.HTTPPort}`,
+ proxyUrl: `socks://${proxySettings.SOCKSProxy}:${proxySettings.SOCKSPort}`,
noProxy
};
}
- else if (proxySettings.SOCKSEnable && proxySettings.SOCKSProxy && proxySettings.SOCKSPort) {
+ else if (proxySettings.HTTPSEnable && proxySettings.HTTPSProxy && proxySettings.HTTPSPort) {
return {
- proxyUrl: `socks://${proxySettings.SOCKSProxy}:${proxySettings.SOCKSPort}`,
+ proxyUrl: `http://${proxySettings.HTTPSProxy}:${proxySettings.HTTPSPort}`,
noProxy
};
}

File diff suppressed because one or more lines are too long

LICENSE

@ -1,62 +1,87 @@
**许可协议**
**许可协议 (Licensing)**
软件采用 Apache License 2.0 许可。除 Apache License 2.0 规定的条款外,您在使用 Cherry Studio 时还应遵守以下附加条款:
项目采用**区分用户的双重许可 (User-Segmented Dual Licensing)** 模式。
**一. 商用许可**
**核心原则:**
在以下任何一种情况下,您需要联系我们并获得明确的书面商业授权后,方可继续使用 Cherry Studio 材料:
* **个人用户 和 10人及以下企业/组织:** 默认适用 **GNU Affero 通用公共许可证 v3.0 (AGPLv3)**。
* **超过10人的企业/组织:** **必须** 获取 **商业许可证 (Commercial License)**。
1. **修改与衍生** 您对 Cherry Studio 材料进行修改或基于其进行衍生开发包括但不限于修改应用名称、Logo、代码、功能、界面数据等
2. **企业服务** 在您的企业内部,或为企业客户提供基于 CherryStudio 的服务,且该服务支持 10 人及以上累计用户使用。
3. **硬件捆绑销售** 您将 Cherry Studio 预装或集成到硬件设备或产品中进行捆绑销售。
4. **政府或教育机构大规模采购** 您的使用场景属于政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
5. **面向公众的公有云服务**:基于 Cherry Studio提供面向公众的公有云服务。
**二. 贡献者协议**
作为 Cherry Studio 的贡献者,您应当同意以下条款:
1. **许可调整**:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。
2. **商业用途**:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。
**三. 其他条款**
1. 本协议条款的解释权归 Cherry Studio 开发者所有。
2. 本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。
如有任何问题或需申请商业授权,请联系 Cherry Studio 开发团队。
除上述特定条件外,其他所有权利和限制均遵循 Apache License 2.0。有关 Apache License 2.0 的详细信息,请访问 http://www.apache.org/licenses/LICENSE-2.0。
定义“10人及以下”
指在您的组织包括公司、非营利组织、政府机构、教育机构等任何实体能够访问、使用或以任何方式直接或间接受益于本软件Cherry Studio功能的个人总数不超过10人。这包括但不限于开发者、测试人员、运营人员、最终用户、通过集成系统间接使用者等。
---
**1. 开源许可证 (Open Source License): AGPLv3 - 适用于个人及10人及以下组织**
**License Agreement**
* 如果您是个人用户或者您的组织满足上述“10人及以下”的定义您可以在 **AGPLv3** 的条款下自由使用、修改和分发 Cherry Studio。AGPLv3 的完整文本可以访问 [https://www.gnu.org/licenses/agpl-3.0.html](https://www.gnu.org/licenses/agpl-3.0.html) 获取。
* **核心义务:** AGPLv3 的一个关键要求是,如果您修改了 Cherry Studio 并通过网络提供服务,或者分发了修改后的版本,您必须以 AGPLv3 许可证向接收者提供相应的**完整源代码**。即使您符合“10人及以下”的标准如果您希望避免此源代码公开义务您也需要考虑获取商业许可证见下文
* 使用前请务必仔细阅读并理解 AGPLv3 的所有条款。
This software is licensed under the Apache License 2.0. In addition to the terms stipulated by the Apache License 2.0, you must comply with the following supplementary terms when using Cherry Studio:
**2. 商业许可证 (Commercial License) - 适用于超过10人的组织或希望规避 AGPLv3 义务的用户**
**I. Commercial Licensing**
* **强制要求:** 如果您的组织**不**满足上述“10人及以下”的定义即有11人或更多人可以访问、使用或受益于本软件您**必须**联系我们获取并签署一份商业许可证才能使用 Cherry Studio。
* **自愿选择:** 即使您的组织满足“10人及以下”的条件但如果您的使用场景**无法满足 AGPLv3 的条款要求**(特别是关于**源代码公开**的义务),或者您需要 AGPLv3 **未提供**的特定商业条款(如保证、赔偿、无 Copyleft 限制等),您也**必须**联系我们获取并签署一份商业许可证。
* **需要商业许可证的常见情况包括(但不限于):**
* 您的组织规模超过10人。
* (无论组织规模)您希望分发修改过的 Cherry Studio 版本,但**不希望**根据 AGPLv3 公开您修改部分的源代码。
* (无论组织规模)您希望基于修改过的 Cherry Studio 提供网络服务SaaS但**不希望**根据 AGPLv3 向服务使用者提供修改后的源代码。
* (无论组织规模)您的公司政策、客户合同或项目要求不允许使用 AGPLv3 许可的软件,或要求闭源分发及保密。
* 商业许可证将为您提供豁免 AGPLv3 义务(如源代码公开)的权利,并可能包含额外的商业保障条款。
* **获取商业许可:** 请通过邮箱 **bd@cherry-ai.com** 联系 Cherry Studio 开发团队洽谈商业授权事宜。
You must contact us and obtain explicit written commercial authorization to continue using Cherry Studio materials under any of the following circumstances:
**3. 贡献 (Contributions)**
1. **Modifications and Derivatives:** You modify Cherry Studio materials or perform derivative development based on them (including but not limited to changing the application's name, logo, code, functionality, user interface, data, etc.).
2. **Enterprise Services:** You use Cherry Studio internally within your enterprise, or you provide Cherry Studio-based services for enterprise customers, and such services support cumulative usage by 10 or more users.
3. **Hardware Bundling and Sales:** You pre-install or integrate Cherry Studio into hardware devices or products for bundled sale.
4. **Large-scale Procurement by Government or Educational Institutions:** Your usage scenario involves large-scale procurement projects by government or educational institutions, especially in cases involving sensitive requirements such as security and data privacy.
5. **Public Cloud Services:** You provide public cloud-based product services utilizing Cherry Studio.
* 我们欢迎社区对 Cherry Studio 的贡献。所有向本项目提交的贡献都将被视为在 **AGPLv3** 许可证下提供。
* 通过向本项目提交贡献(例如通过 Pull Request即表示您同意您的代码以 AGPLv3 许可证授权给本项目及所有后续使用者(无论这些使用者最终遵循 AGPLv3 还是商业许可)。
* 您也理解并同意,您的贡献可能会被包含在根据商业许可证分发的 Cherry Studio 版本中。
**II. Contributor Agreement**
**4. 其他条款 (Other Terms)**
As a contributor to Cherry Studio, you must agree to the following terms:
* 关于商业许可证的具体条款和条件,以双方签署的正式商业许可协议为准。
* 项目维护者保留根据需要更新本许可政策(包括用户规模定义和阈值)的权利。相关更新将通过项目官方渠道(如代码仓库、官方网站)进行通知。
1. **License Adjustments:** The producer reserves the right to adjust the open-source license as necessary, making it more strict or permissive.
2. **Commercial Usage:** Your contributed code may be used commercially, including but not limited to cloud business operations.
---
**III. Other Terms**
**Licensing**
1. Cherry Studio developers reserve the right of final interpretation of these agreement terms.
2. This agreement may be updated according to practical circumstances, and users will be notified of updates through this software.
This project employs a **User-Segmented Dual Licensing** model.
If you have any questions or need to apply for commercial authorization, please contact the Cherry Studio development team.
**Core Principle:**
Other than these specific conditions, all remaining rights and restrictions follow the Apache License 2.0. For more detailed information regarding Apache License 2.0, please visit http://www.apache.org/licenses/LICENSE-2.0.
* **Individual Users and Organizations with 10 or Fewer Individuals:** Governed by default under the **GNU Affero General Public License v3.0 (AGPLv3)**.
* **Organizations with More Than 10 Individuals:** **Must** obtain a **Commercial License**.
Definition: "10 or Fewer Individuals"
Refers to any organization (including companies, non-profits, government agencies, educational institutions, etc.) where the total number of individuals who can access, use, or in any way directly or indirectly benefit from the functionality of this software (Cherry Studio) does not exceed 10. This includes, but is not limited to, developers, testers, operations staff, end-users, and indirect users via integrated systems.
---
**1. Open Source License: AGPLv3 - For Individuals and Organizations of 10 or Fewer**
* If you are an individual user, or if your organization meets the "10 or Fewer Individuals" definition above, you are free to use, modify, and distribute Cherry Studio under the terms of the **AGPLv3**. The full text of the AGPLv3 can be found in the LICENSE file at [https://www.gnu.org/licenses/agpl-3.0.html](https://www.gnu.org/licenses/agpl-3.0.html).
* **Core Obligation:** A key requirement of the AGPLv3 is that if you modify Cherry Studio and make it available over a network, or distribute the modified version, you must provide the **complete corresponding source code** under the AGPLv3 license to the recipients. Even if you qualify under the "10 or Fewer Individuals" rule, if you wish to avoid this source code disclosure obligation, you will need to obtain a Commercial License (see below).
* Please read and understand the full terms of the AGPLv3 carefully before use.
**2. Commercial License - For Organizations with More Than 10 Individuals, or Users Needing to Avoid AGPLv3 Obligations**
* **Mandatory Requirement:** If your organization does **not** meet the "10 or Fewer Individuals" definition above (i.e., 11 or more individuals can access, use, or benefit from the software), you **must** contact us to obtain and execute a Commercial License to use Cherry Studio.
* **Voluntary Option:** Even if your organization meets the "10 or Fewer Individuals" condition, if your intended use case **cannot comply with the terms of the AGPLv3** (particularly the obligations regarding **source code disclosure**), or if you require specific commercial terms **not offered** by the AGPLv3 (such as warranties, indemnities, or freedom from copyleft restrictions), you also **must** contact us to obtain and execute a Commercial License.
* **Common scenarios requiring a Commercial License include (but are not limited to):**
* Your organization has more than 10 individuals who can access, use, or benefit from the software.
* (Regardless of organization size) You wish to distribute a modified version of Cherry Studio but **do not want** to disclose the source code of your modifications under AGPLv3.
* (Regardless of organization size) You wish to provide a network service (SaaS) based on a modified version of Cherry Studio but **do not want** to provide the modified source code to users of the service under AGPLv3.
* (Regardless of organization size) Your corporate policies, client contracts, or project requirements prohibit the use of AGPLv3-licensed software or mandate closed-source distribution and confidentiality.
* The Commercial License grants you rights exempting you from AGPLv3 obligations (like source code disclosure) and may include additional commercial assurances.
* **Obtaining a Commercial License:** Please contact the Cherry Studio development team via email at **bd@cherry-ai.com** to discuss commercial licensing options.
**3. Contributions**
* We welcome community contributions to Cherry Studio. All contributions submitted to this project are considered to be offered under the **AGPLv3** license.
* By submitting a contribution to this project (e.g., via a Pull Request), you agree to license your code under the AGPLv3 to the project and all its downstream users (regardless of whether those users ultimately operate under AGPLv3 or a Commercial License).
* You also understand and agree that your contribution may be included in distributions of Cherry Studio offered under our commercial license.
**4. Other Terms**
* The specific terms and conditions of the Commercial License are governed by the formal commercial license agreement signed by both parties.
* The project maintainers reserve the right to update this licensing policy (including the definition and threshold for user count) as needed. Updates will be communicated through official project channels (e.g., code repository, official website).


@ -13,7 +13,7 @@
Cherry Studio is a desktop client that supports for multiple LLM providers, available on Windows, Mac and Linux.
👏 Join [Telegram Group](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
👏 Join [Telegram Group](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
❤️ Like Cherry Studio? Give it a star 🌟 or [Sponsor](docs/sponsor.md) to support the development!
@ -23,14 +23,12 @@ https://docs.cherry-ai.com
# 🌠 Screenshot
![](https://github.com/user-attachments/assets/28585d83-4bf0-4714-b561-8c7bf57cc600)
![](https://github.com/user-attachments/assets/8576863a-f632-4776-bc12-657eeced9da3)
![](https://github.com/user-attachments/assets/790790d7-b462-48dd-bde1-91c1697a4648)
![](https://github.com/user-attachments/assets/082efa42-c4df-4863-a9cb-80435cecce0f)
![](https://github.com/user-attachments/assets/f8411a65-c51f-47d3-9273-62ae384cc6f1)
![](https://github.com/user-attachments/assets/0d235b3e-65ae-45ab-987f-8dbe003c52be)
# 🌟 Key Features
![](https://github.com/user-attachments/assets/7b4f2f78-5cbe-4be8-9aec-f98d8405a505)
1. **Diverse LLM Provider Support**:
- ☁️ Major LLM Cloud Services: OpenAI, Gemini, Anthropic, and more
@ -84,9 +82,11 @@ https://docs.cherry-ai.com
# 🌈 Theme
- Theme Gallery: https://cherrycss.com
- Theme Gallery: https://cherrycss.com
- Aero Theme: https://github.com/hakadao/CherryStudio-Aero
- PaperMaterial Theme: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
- Claude dynamic-style: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
- Maple Neon Theme: https://github.com/BoningtonChen/CherryStudio_themes
Welcome PR for more themes

New binary files added under build/icons/:
1024x1024.png (105 KiB)
128x128.png (9.3 KiB)
16x16.png (621 B)
24x24.png (1.0 KiB)
256x256.png (23 KiB)
32x32.png (1.5 KiB)
48x48.png (2.6 KiB)
512x512.png (50 KiB)
64x64.png (3.7 KiB)


@ -1,8 +1,8 @@
# provider: generic
# url: http://127.0.0.1:8080
# updaterCacheDirName: cherry-studio-updater
provider: github
repo: cherry-studio
owner: kangfenmao
# provider: generic
# url: https://cherrystudio.ocool.online
# provider: github
# repo: cherry-studio
# owner: kangfenmao
provider: generic
url: https://releases.cherry-ai.com


@ -14,7 +14,7 @@
Cherry Studio は、複数の LLM プロバイダーをサポートするデスクトップクライアントで、Windows、Mac、Linux で利用可能です。
👏 [Telegram](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
👏 [Telegram](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
❤️ Cherry Studio をお気に入りにしましたか?小さな星をつけてください 🌟 または [スポンサー](sponsor.md) をして開発をサポートしてください!❤️
@ -24,14 +24,12 @@ https://docs.cherry-ai.com
# 🌠 スクリーンショット
![](https://github.com/user-attachments/assets/28585d83-4bf0-4714-b561-8c7bf57cc600)
![](https://github.com/user-attachments/assets/8576863a-f632-4776-bc12-657eeced9da3)
![](https://github.com/user-attachments/assets/790790d7-b462-48dd-bde1-91c1697a4648)
![](https://github.com/user-attachments/assets/082efa42-c4df-4863-a9cb-80435cecce0f)
![](https://github.com/user-attachments/assets/f8411a65-c51f-47d3-9273-62ae384cc6f1)
![](https://github.com/user-attachments/assets/0d235b3e-65ae-45ab-987f-8dbe003c52be)
# 🌟 主な機能
![](https://github.com/user-attachments/assets/7b4f2f78-5cbe-4be8-9aec-f98d8405a505)
1. **多様な LLM サービス対応**
- ☁️ 主要な LLM クラウドサービス対応OpenAI、Gemini、Anthropic など
@ -85,8 +83,11 @@ https://docs.cherry-ai.com
# 🌈 テーマ
テーマギャラリー: https://cherrycss.com
Aero テーマ: https://github.com/hakadao/CherryStudio-Aero
- テーマギャラリー: https://cherrycss.com
- Aero テーマ: https://github.com/hakadao/CherryStudio-Aero
- PaperMaterial テーマ: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
- Claude テーマ: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
- メープルネオンテーマ: https://github.com/BoningtonChen/CherryStudio_themes
より多くのテーマのPRを歓迎します


@ -14,7 +14,7 @@
Cherry Studio 是一款支持多个大语言模型LLM服务商的桌面客户端兼容 Windows、Mac 和 Linux 系统。
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
❤️ 喜欢 Cherry Studio? 点亮小星星 🌟 或 [赞助开发者](sponsor.md)! ❤️
@ -24,14 +24,12 @@ https://docs.cherry-ai.com
# 🌠 界面
![](https://github.com/user-attachments/assets/28585d83-4bf0-4714-b561-8c7bf57cc600)
![](https://github.com/user-attachments/assets/8576863a-f632-4776-bc12-657eeced9da3)
![](https://github.com/user-attachments/assets/790790d7-b462-48dd-bde1-91c1697a4648)
![](https://github.com/user-attachments/assets/082efa42-c4df-4863-a9cb-80435cecce0f)
![](https://github.com/user-attachments/assets/f8411a65-c51f-47d3-9273-62ae384cc6f1)
![](https://github.com/user-attachments/assets/0d235b3e-65ae-45ab-987f-8dbe003c52be)
# 🌟 主要特性
![](https://github.com/user-attachments/assets/995910f3-177a-4d1e-97ea-04e3b009ba36)
1. **多样化 LLM 服务支持**
- ☁️ 支持主流 LLM 云服务OpenAI、Gemini、Anthropic、硅基流动等
@ -85,8 +83,11 @@ https://docs.cherry-ai.com
# 🌈 主题
主题库https://cherrycss.com
Aero 主题https://github.com/hakadao/CherryStudio-Aero
- 主题库https://cherrycss.com
- Aero 主题https://github.com/hakadao/CherryStudio-Aero
- PaperMaterial 主题: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
- 仿Claude 主题: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
- 霓虹枫叶字体主题: https://github.com/BoningtonChen/CherryStudio_themes
欢迎 PR 更多主题


@ -1,5 +1,14 @@
appId: com.kangfenmao.CherryStudio
productName: Cherry Studio
electronLanguages:
- zh-CN
- zh-TW
- en-US
- ja # macOS/linux/win
- ru # macOS/linux/win
- zh_CN # for macOS
- zh_TW # for macOS
- en # for macOS
directories:
buildResources: build
files:
@ -29,21 +38,24 @@ files:
- '!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}'
asarUnpack:
- resources/**
- '**/*.{node,dll,metal,exp,lib}'
- '**/*.{metal,exp,lib}'
win:
executableName: Cherry Studio
artifactName: ${productName}-${version}-portable.${ext}
artifactName: ${productName}-${version}-${arch}-setup.${ext}
target:
- target: nsis
- target: portable
nsis:
artifactName: ${productName}-${version}-setup.${ext}
artifactName: ${productName}-${version}-${arch}-setup.${ext}
shortcutName: ${productName}
uninstallDisplayName: ${productName}
createDesktopShortcut: always
allowToChangeInstallationDirectory: true
oneClick: false
include: build/nsis-installer.nsh
buildUniversalInstaller: false
portable:
artifactName: ${productName}-${version}-${arch}-portable.${ext}
mac:
entitlementsInherit: build/entitlements.mac.plist
notarize: false
@ -55,34 +67,31 @@ mac:
- NSDownloadsFolderUsageDescription: Application requests access to the user's Downloads folder.
target:
- target: dmg
arch:
- arm64
- x64
- target: zip
arch:
- arm64
- x64
linux:
artifactName: ${productName}-${version}-${arch}.${ext}
target:
- target: AppImage
arch:
- arm64
- x64
maintainer: electronjs.org
category: Utility
desktop:
entry:
StartupWMClass: CherryStudio
publish:
# provider: generic
# url: https://cherrystudio.ocool.online
provider: github
repo: cherry-studio
owner: CherryHQ
provider: generic
url: https://releases.cherry-ai.com
electronDownload:
mirror: https://npmmirror.com/mirrors/electron/
afterPack: scripts/after-pack.js
afterSign: scripts/notarize.js
artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo:
releaseNotes: |
小程序支持多开
支持 GPT-4o 图像生成
修复 MCP 服务器无法使用问题
修正语言及本地化错误
Windows ARM 更新跳转到官网下载
改进系统代理处理和初始化逻辑
修复 MCP 服务请求头不生效问题
移除搜索增强模式
优化消息渲染速度
修复备份大文件失败问题
修复网络搜索导致卡顿问题


@ -1,4 +1,5 @@
import react from '@vitejs/plugin-react'
import { sentryVitePlugin } from '@sentry/vite-plugin'
import viteReact from '@vitejs/plugin-react'
import { defineConfig, externalizeDepsPlugin } from 'electron-vite'
import { resolve } from 'path'
import { visualizer } from 'rollup-plugin-visualizer'
@ -6,7 +7,7 @@ import { visualizer } from 'rollup-plugin-visualizer'
const visualizerPlugin = (type: 'renderer' | 'main') => {
return process.env[`VISUALIZER_${type.toUpperCase()}`] ? [visualizer({ open: true })] : []
}
// const viteReact = await import('@vitejs/plugin-react')
export default defineConfig({
main: {
plugins: [
@ -42,11 +43,16 @@ export default defineConfig({
}
},
preload: {
plugins: [externalizeDepsPlugin()]
plugins: [externalizeDepsPlugin()],
resolve: {
alias: {
'@shared': resolve('packages/shared')
}
}
},
renderer: {
plugins: [
react({
viteReact({
babel: {
plugins: [
[
@ -61,6 +67,11 @@ export default defineConfig({
]
}
}),
sentryVitePlugin({
authToken: process.env.SENTRY_AUTH_TOKEN,
org: 'cherry-ai',
project: 'cherry-studio'
}),
...visualizerPlugin('renderer')
],
resolve: {
@ -70,7 +81,7 @@ export default defineConfig({
}
},
optimizeDeps: {
exclude: ['chunk-PZ64DZKH.js', 'chunk-JMKENWIY.js', 'chunk-UXYB6GHG.js', 'chunk-ALDIEZMG.js', 'chunk-4X6ZJEXY.js']
exclude: []
}
}
})


@ -1,6 +1,6 @@
{
"name": "CherryStudio",
"version": "1.1.14",
"version": "1.2.7",
"private": true,
"description": "A powerful AI assistant for producer.",
"main": "./out/main/index.js",
@ -23,12 +23,13 @@
"build": "npm run typecheck && electron-vite build",
"build:check": "yarn test && yarn typecheck && yarn check:i18n",
"build:unpack": "dotenv npm run build && electron-builder --dir",
"build:win": "dotenv npm run build && electron-builder --win",
"build:win": "dotenv npm run build && electron-builder --win --x64 --arm64",
"build:win:x64": "dotenv npm run build && electron-builder --win --x64",
"build:mac": "dotenv electron-vite build && electron-builder --mac",
"build:win:arm64": "dotenv npm run build && electron-builder --win --arm64",
"build:mac": "dotenv electron-vite build && electron-builder --mac --arm64 --x64",
"build:mac:arm64": "dotenv electron-vite build && electron-builder --mac --arm64",
"build:mac:x64": "dotenv electron-vite build && electron-builder --mac --x64",
"build:linux": "dotenv electron-vite build && electron-builder --linux",
"build:linux": "dotenv electron-vite build && electron-builder --linux --x64 --arm64",
"build:linux:arm64": "dotenv electron-vite build && electron-builder --linux --arm64",
"build:linux:x64": "dotenv electron-vite build && electron-builder --linux --x64",
"build:npm": "node scripts/build-npm.js",
@ -43,7 +44,12 @@
"typecheck:node": "tsc --noEmit -p tsconfig.node.json --composite false",
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
"check:i18n": "node scripts/check-i18n.js",
"test": "npx -y tsx --test src/**/*.test.ts",
"test": "yarn test:renderer",
"test:coverage": "yarn test:renderer:coverage",
"test:node": "npx -y tsx --test src/**/*.test.ts",
"test:renderer": "vitest run",
"test:renderer:ui": "vitest --ui",
"test:renderer:coverage": "vitest run --coverage",
"format": "prettier --write .",
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
"postinstall": "electron-builder install-app-deps",
@ -63,28 +69,43 @@
"@cherrystudio/embedjs-openai": "^0.1.28",
"@electron-toolkit/utils": "^3.0.0",
"@electron/notarize": "^2.5.0",
"@google/generative-ai": "^0.21.0",
"@langchain/community": "^0.3.36",
"@mozilla/readability": "^0.6.0",
"@notionhq/client": "^2.2.15",
"@sentry/electron": "^6.5.0",
"@strongtz/win32-arm64-msvc": "^0.4.7",
"@tryfabric/martian": "^1.2.4",
"@types/react-infinite-scroll-component": "^5.0.0",
"@xyflow/react": "^12.4.4",
"adm-zip": "^0.5.16",
"archiver": "^7.0.1",
"async-mutex": "^0.5.0",
"bufferutil": "^4.0.9",
"color": "^5.0.0",
"diff": "^7.0.0",
"docx": "^9.0.2",
"electron-log": "^5.1.5",
"electron-store": "^8.2.0",
"electron-updater": "^6.3.9",
"electron-window-state": "^5.0.3",
"epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch",
"fast-xml-parser": "^5.0.9",
"extract-zip": "^2.0.1",
"fast-xml-parser": "^5.2.0",
"fetch-socks": "^1.3.2",
"fs-extra": "^11.2.0",
"got-scraping": "^4.1.1",
"jsdom": "^26.0.0",
"markdown-it": "^14.1.0",
"node-stream-zip": "^1.15.0",
"officeparser": "^4.1.1",
"os-proxy-config": "patch:os-proxy-config@npm%3A1.1.1#~/.yarn/patches/os-proxy-config-npm-1.1.1-af9c7574cc.patch",
"proxy-agent": "^6.5.0",
"tar": "^7.4.3",
"turndown": "^7.2.0",
"turndown-plugin-gfm": "^1.0.2",
"undici": "^7.4.0",
"webdav": "^5.8.0",
"ws": "^8.18.1",
"zipread": "^1.3.3"
},
"devDependencies": {
@ -100,15 +121,18 @@
"@emotion/is-prop-valid": "^1.3.1",
"@eslint-react/eslint-plugin": "^1.36.1",
"@eslint/js": "^9.22.0",
"@google/genai": "^0.4.0",
"@google/genai": "patch:@google/genai@npm%3A0.8.0#~/.yarn/patches/@google-genai-npm-0.8.0-450d0d9a7d.patch",
"@hello-pangea/dnd": "^16.6.0",
"@kangfenmao/keyv-storage": "^0.1.0",
"@modelcontextprotocol/sdk": "^1.8.0",
"@modelcontextprotocol/sdk": "^1.9.0",
"@notionhq/client": "^2.2.15",
"@reduxjs/toolkit": "^2.2.5",
"@sentry/react": "^9.13.0",
"@sentry/vite-plugin": "^3.3.1",
"@tavily/core": "patch:@tavily/core@npm%3A0.3.1#~/.yarn/patches/@tavily-core-npm-0.3.1-fe69bf2bea.patch",
"@tryfabric/martian": "^1.2.4",
"@types/adm-zip": "^0",
"@types/diff": "^7",
"@types/fs-extra": "^11",
"@types/lodash": "^4.17.5",
"@types/markdown-it": "^14",
@ -119,7 +143,10 @@
"@types/react-dom": "^19.0.4",
"@types/react-infinite-scroll-component": "^5.0.0",
"@types/tinycolor2": "^1",
"@vitejs/plugin-react": "^4.2.1",
"@types/ws": "^8",
"@vitejs/plugin-react": "^4.4.1",
"@vitest/coverage-v8": "^3.1.1",
"@vitest/ui": "^3.1.1",
"antd": "^5.22.5",
"applescript": "^1.0.0",
"axios": "^1.7.3",
@ -130,7 +157,7 @@
"dexie-react-hooks": "^1.1.7",
"dotenv-cli": "^7.4.2",
"electron": "31.7.6",
"electron-builder": "^24.13.3",
"electron-builder": "26.0.13",
"electron-devtools-installer": "^3.2.0",
"electron-icon-builder": "^2.0.1",
"electron-vite": "^2.3.0",
@ -145,9 +172,11 @@
"i18next": "^23.11.5",
"lint-staged": "^15.5.0",
"lodash": "^4.17.21",
"lru-cache": "^11.1.0",
"lucide-react": "^0.487.0",
"mime": "^4.0.4",
"npx-scope-finder": "^1.2.0",
"openai": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
"openai": "patch:openai@npm%3A4.87.3#~/.yarn/patches/openai-npm-4.87.3-2b30a7685f.patch",
"p-queue": "^8.1.0",
"prettier": "^3.5.3",
"rc-virtual-list": "^3.18.5",
@ -166,25 +195,31 @@
"rehype-katex": "^7.0.1",
"rehype-mathjax": "^7.0.0",
"rehype-raw": "^7.0.0",
"remark-cjk-friendly": "^1.1.0",
"remark-gfm": "^4.0.0",
"remark-math": "^6.0.0",
"rollup-plugin-visualizer": "^5.12.0",
"sass": "^1.77.2",
"shiki": "^1.22.2",
"shiki": "^3.2.1",
"string-width": "^7.2.0",
"styled-components": "^6.1.11",
"tiny-pinyin": "^1.3.2",
"tinycolor2": "^1.6.0",
"tokenx": "^0.4.1",
"typescript": "^5.6.2",
"uuid": "^10.0.0",
"vite": "^5.0.12"
"vite": "6.2.6",
"vitest": "^3.1.1"
},
"resolutions": {
"pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
"@langchain/openai@npm:>=0.1.0 <0.4.0": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
"openai@npm:^4.77.0": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch"
"node-gyp": "^9.1.0",
"libsql@npm:^0.4.4": "patch:libsql@npm%3A0.4.7#~/.yarn/patches/libsql-npm-0.4.7-444e260fb1.patch",
"openai@npm:^4.77.0": "patch:openai@npm%3A4.87.3#~/.yarn/patches/openai-npm-4.87.3-2b30a7685f.patch",
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch",
"app-builder-lib@npm:26.0.13": "patch:app-builder-lib@npm%3A26.0.13#~/.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch"
},
"packageManager": "yarn@4.6.0",
"lint-staged": {


@ -0,0 +1,166 @@
export enum IpcChannel {
App_ClearCache = 'app:clear-cache',
App_SetLaunchOnBoot = 'app:set-launch-on-boot',
App_SetLanguage = 'app:set-language',
App_ShowUpdateDialog = 'app:show-update-dialog',
App_CheckForUpdate = 'app:check-for-update',
App_Reload = 'app:reload',
App_Info = 'app:info',
App_Proxy = 'app:proxy',
App_SetLaunchToTray = 'app:set-launch-to-tray',
App_SetTray = 'app:set-tray',
App_SetTrayOnClose = 'app:set-tray-on-close',
App_RestartTray = 'app:restart-tray',
App_SetTheme = 'app:set-theme',
App_SetCustomCss = 'app:set-custom-css',
App_SetAutoUpdate = 'app:set-auto-update',
App_IsBinaryExist = 'app:is-binary-exist',
App_GetBinaryPath = 'app:get-binary-path',
App_InstallUvBinary = 'app:install-uv-binary',
App_InstallBunBinary = 'app:install-bun-binary',
// Open
Open_Path = 'open:path',
Open_Website = 'open:website',
Minapp = 'minapp',
Config_Set = 'config:set',
Config_Get = 'config:get',
MiniWindow_Show = 'miniwindow:show',
MiniWindow_Hide = 'miniwindow:hide',
MiniWindow_Close = 'miniwindow:close',
MiniWindow_Toggle = 'miniwindow:toggle',
MiniWindow_SetPin = 'miniwindow:set-pin',
// Mcp
Mcp_RemoveServer = 'mcp:remove-server',
Mcp_RestartServer = 'mcp:restart-server',
Mcp_StopServer = 'mcp:stop-server',
Mcp_ListTools = 'mcp:list-tools',
Mcp_CallTool = 'mcp:call-tool',
Mcp_ListPrompts = 'mcp:list-prompts',
Mcp_GetPrompt = 'mcp:get-prompt',
Mcp_ListResources = 'mcp:list-resources',
Mcp_GetResource = 'mcp:get-resource',
Mcp_GetInstallInfo = 'mcp:get-install-info',
Mcp_ServersChanged = 'mcp:servers-changed',
Mcp_ServersUpdated = 'mcp:servers-updated',
//copilot
Copilot_GetAuthMessage = 'copilot:get-auth-message',
Copilot_GetCopilotToken = 'copilot:get-copilot-token',
Copilot_SaveCopilotToken = 'copilot:save-copilot-token',
Copilot_GetToken = 'copilot:get-token',
Copilot_Logout = 'copilot:logout',
Copilot_GetUser = 'copilot:get-user',
// obsidian
Obsidian_GetVaults = 'obsidian:get-vaults',
Obsidian_GetFiles = 'obsidian:get-files',
// nutstore
Nutstore_GetSsoUrl = 'nutstore:get-sso-url',
Nutstore_DecryptToken = 'nutstore:decrypt-token',
Nutstore_GetDirectoryContents = 'nutstore:get-directory-contents',
//aes
Aes_Encrypt = 'aes:encrypt',
Aes_Decrypt = 'aes:decrypt',
Gemini_UploadFile = 'gemini:upload-file',
Gemini_Base64File = 'gemini:base64-file',
Gemini_RetrieveFile = 'gemini:retrieve-file',
Gemini_ListFiles = 'gemini:list-files',
Gemini_DeleteFile = 'gemini:delete-file',
Windows_ResetMinimumSize = 'window:reset-minimum-size',
Windows_SetMinimumSize = 'window:set-minimum-size',
SelectionMenu_Action = 'selection-menu:action',
KnowledgeBase_Create = 'knowledge-base:create',
KnowledgeBase_Reset = 'knowledge-base:reset',
KnowledgeBase_Delete = 'knowledge-base:delete',
KnowledgeBase_Add = 'knowledge-base:add',
KnowledgeBase_Remove = 'knowledge-base:remove',
KnowledgeBase_Search = 'knowledge-base:search',
KnowledgeBase_Rerank = 'knowledge-base:rerank',
//file
File_Open = 'file:open',
File_OpenPath = 'file:openPath',
File_Save = 'file:save',
File_Select = 'file:select',
File_Upload = 'file:upload',
File_Clear = 'file:clear',
File_Read = 'file:read',
File_Delete = 'file:delete',
File_Get = 'file:get',
File_SelectFolder = 'file:selectFolder',
File_Create = 'file:create',
File_Write = 'file:write',
File_SaveImage = 'file:saveImage',
File_Base64Image = 'file:base64Image',
File_Download = 'file:download',
File_Copy = 'file:copy',
File_BinaryFile = 'file:binaryFile',
Fs_Read = 'fs:read',
Export_Word = 'export:word',
Shortcuts_Update = 'shortcuts:update',
// backup
Backup_Backup = 'backup:backup',
Backup_Restore = 'backup:restore',
Backup_BackupToWebdav = 'backup:backupToWebdav',
Backup_RestoreFromWebdav = 'backup:restoreFromWebdav',
Backup_ListWebdavFiles = 'backup:listWebdavFiles',
Backup_CheckConnection = 'backup:checkConnection',
Backup_CreateDirectory = 'backup:createDirectory',
Backup_DeleteWebdavFile = 'backup:deleteWebdavFile',
// zip
Zip_Compress = 'zip:compress',
Zip_Decompress = 'zip:decompress',
// system
System_GetDeviceType = 'system:getDeviceType',
System_GetHostname = 'system:getHostname',
// events
SelectionAction = 'selection-action',
BackupProgress = 'backup-progress',
ThemeChange = 'theme:change',
UpdateDownloadedCancelled = 'update-downloaded-cancelled',
RestoreProgress = 'restore-progress',
UpdateError = 'update-error',
UpdateAvailable = 'update-available',
UpdateNotAvailable = 'update-not-available',
DownloadProgress = 'download-progress',
UpdateDownloaded = 'update-downloaded',
DownloadUpdate = 'download-update',
DirectoryProcessingPercent = 'directory-processing-percent',
FullscreenStatusChanged = 'fullscreen-status-changed',
HideMiniWindow = 'hide-mini-window',
ShowMiniWindow = 'show-mini-window',
MiniWindowReload = 'miniwindow-reload',
ReduxStateChange = 'redux-state-change',
ReduxStoreReady = 'redux-store-ready',
// Search Window
SearchWindow_Open = 'search-window:open',
SearchWindow_Close = 'search-window:close',
SearchWindow_OpenUrl = 'search-window:open-url',
// sentry
Sentry_Init = 'sentry:init'
}


@ -157,3 +157,8 @@ export const ZOOM_SHORTCUTS = [
system: true
}
]
export const KB = 1024
export const MB = 1024 * KB
export const GB = 1024 * MB
export const defaultLanguage = 'en-US'


@ -7,103 +7,145 @@
<link href="https://cdn.jsdelivr.net/npm/tailwindcss@2.2.19/dist/tailwind.min.css" rel="stylesheet" />
</head>
<body class="bg-gray-100 p-8">
<div class="container mx-auto bg-white p-6 rounded shadow-lg">
<h1 class="text-3xl font-bold mb-6 text-center">Cherry Studio 许可协议</h1>
<div class="mb-8">
<h2 class="text-2xl font-semibold mb-4">许可协议</h2>
<p class="mb-4">
本软件采用 <strong>Apache License 2.0</strong> 许可。除 Apache License 2.0 规定的条款外,您在使用 Cherry
Studio 时还应遵守以下附加条款:
</p>
<h3 class="text-xl font-semibold mb-2">一. 商用许可</h3>
<ol class="list-decimal list-inside mb-4">
<li><strong>免费商用</strong>:用户在不修改代码的情况下,可以免费用于商业目的。</li>
<li>
<strong>商业授权</strong>:如果您满足以下任意条件之一,需取得商业授权:
<ol class="list-decimal list-inside ml-4">
<li>对本软件进行二次修改、开发包括但不限于修改应用名称、logo、代码以及功能</li>
<li>为企业客户提供多租户服务,且该服务支持 10 人或以上的使用。</li>
<li>预装或集成到硬件设备或产品中进行捆绑销售。</li>
<li>政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。</li>
</ol>
</li>
</ol>
<h3 class="text-xl font-semibold mb-2">二. 贡献者协议</h3>
<ol class="list-decimal list-inside mb-4">
<li><strong>许可调整</strong>:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。</li>
<li><strong>商业用途</strong>:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。</li>
</ol>
<h3 class="text-xl font-semibold mb-2">三. 其他条款</h3>
<ol class="list-decimal list-inside mb-4">
<li>本协议条款的解释权归 Cherry Studio 开发者所有。</li>
<li>本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。</li>
</ol>
<p class="mb-4">如有任何问题或需申请商业授权,请联系 Cherry Studio 开发团队。</p>
<p>
除上述特定条件外,其他所有权利和限制均遵循 Apache License 2.0。有关 Apache License 2.0 的详细信息,请访问
<a href="http://www.apache.org/licenses/LICENSE-2.0" class="text-blue-500 underline"
>http://www.apache.org/licenses/LICENSE-2.0</a
>
</p>
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>许可协议 | License Agreement</title>
<script src="https://cdn.tailwindcss.com"></script>
</head>
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>许可协议 | License Agreement</title>
<script src="https://cdn.tailwindcss.com"></script>
</head>
<body class="bg-gray-50">
<div class="max-w-4xl mx-auto px-4 py-8">
<!-- 中文版本 -->
<div class="mb-12">
<h1 class="text-3xl font-bold mb-8 text-gray-900">许可协议</h1>
<p class="mb-6 text-gray-700">采用 Apache License 2.0 修改版许可,并附加以下条件:</p>
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">一. 商用许可</h2>
<p class="mb-4 text-gray-700">
在以下任何一种情况下,您需要联系我们并获得明确的书面商业授权后,方可继续使用 Cherry Studio 材料:
</p>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li>
<strong>修改与衍生</strong> 您对 Cherry Studio
材料进行修改或基于其进行衍生开发包括但不限于修改应用名称、Logo、代码、功能、界面数据等
</li>
<li>
<strong>企业服务</strong> 在您的企业内部,或为企业客户提供基于 Cherry Studio 的服务,且该服务支持 10
人及以上累计用户使用。
</li>
<li><strong>硬件捆绑销售</strong> 您将 Cherry Studio 预装或集成到硬件设备或产品中进行捆绑销售。</li>
<li>
<strong>政府或教育机构大规模采购</strong>
您的使用场景属于政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
</li>
<li><strong>面向公众的公有云服务</strong>:基于 Cherry Studio提供面向公众的公有云服务。</li>
</ol>
</section>
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">二. 贡献者协议</h2>
<p class="mb-4 text-gray-700">作为 Cherry Studio 的贡献者,您应当同意以下条款:</p>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li><strong>许可调整</strong>:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。</li>
<li><strong>商业用途</strong>:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。</li>
</ol>
</section>
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">三. 其他条款</h2>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li>本协议条款的解释权归 Cherry Studio 开发者所有。</li>
<li>本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。</li>
</ol>
</section>
</div>
<h1 class="text-3xl font-bold mb-6 text-center">Cherry Studio License</h1>
<div class="mb-8">
<h2 class="text-2xl font-semibold mb-4">License Agreement</h2>
<p class="mb-4">
This software is licensed under the <strong>Apache License 2.0</strong>. In addition to the terms of the
Apache License 2.0, the following additional terms apply to the use of Cherry Studio:
<hr class="my-12 border-gray-300" />
<!-- English Version -->
<div>
<h1 class="text-3xl font-bold mb-8 text-gray-900">License Agreement</h1>
<p class="mb-6 text-gray-700">
This software is licensed under a modified version of the Apache License 2.0, with the following additional
conditions.
</p>
<h3 class="text-xl font-semibold mb-2">I. Commercial Use License</h3>
<ol class="list-decimal list-inside mb-4">
<li>
<strong>Free Commercial Use</strong>: Users can use the software for commercial purposes without modifying
the code.
</li>
<li>
<strong>Commercial License Required</strong>: A commercial license is required if any of the following
conditions are met:
<ol class="list-decimal list-inside ml-4">
<li>
You modify, develop, or alter the software, including but not limited to changes to the application
name, logo, code, or functionality.
</li>
<li>You provide multi-tenant services to enterprise customers with 10 or more users.</li>
<li>
You pre-install or integrate the software into hardware devices or products and bundle it for sale.
</li>
<li>
You are engaging in large-scale procurement for government or educational institutions, especially
involving security, data privacy, or other sensitive requirements.
</li>
</ol>
</li>
</ol>
<h3 class="text-xl font-semibold mb-2">II. Contributor Agreement</h3>
<ol class="list-decimal list-inside mb-4">
<li>
<strong>License Adjustment</strong>: The producer reserves the right to adjust the open-source license as
needed, making it stricter or more lenient.
</li>
<li>
<strong>Commercial Use</strong>: Any code you contribute may be used for commercial purposes, including but
not limited to cloud business operations.
</li>
</ol>
<h3 class="text-xl font-semibold mb-2">III. Other Terms</h3>
<ol class="list-decimal list-inside mb-4">
<li>The interpretation of these terms is subject to the discretion of Cherry Studio developers.</li>
<li>These terms may be updated, and users will be notified through the software when changes occur.</li>
</ol>
<p class="mb-4">
For any questions or to request a commercial license, please contact the Cherry Studio development team.
</p>
<p>
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache
License 2.0. Detailed information about the Apache License 2.0 can be found at
<a href="http://www.apache.org/licenses/LICENSE-2.0" class="text-blue-500 underline"
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">I. Commercial Licensing</h2>
<p class="mb-4 text-gray-700">
You must contact us and obtain explicit written commercial authorization to continue using Cherry Studio
materials under any of the following circumstances:
</p>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li>
<strong>Modifications and Derivatives:</strong> You modify Cherry Studio materials or perform derivative
development based on them (including but not limited to changing the application's name, logo, code,
functionality, user interface, data, etc.).
</li>
<li>
<strong>Enterprise Services:</strong> You use Cherry Studio internally within your enterprise, or you
provide Cherry Studio-based services for enterprise customers, and such services support cumulative usage
by 10 or more users.
</li>
<li>
<strong>Hardware Bundling and Sales:</strong> You pre-install or integrate Cherry Studio into hardware
devices or products for bundled sale.
</li>
<li>
<strong>Large-scale Procurement by Government or Educational Institutions:</strong> Your usage scenario
involves large-scale procurement projects by government or educational institutions, especially in cases
involving sensitive requirements such as security and data privacy.
</li>
<li>
<strong>Public Cloud Services:</strong> You provide public cloud-based product services utilizing Cherry
Studio.
</li>
</ol>
</section>
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">II. Contributor Agreement</h2>
<p class="mb-4 text-gray-700">As a contributor to Cherry Studio, you must agree to the following terms:</p>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li>
<strong>License Adjustments:</strong> The producer reserves the right to adjust the open-source license as
necessary, making it more strict or permissive.
</li>
<li>
<strong>Commercial Usage:</strong> Your contributed code may be used commercially, including but not
limited to cloud business operations.
</li>
</ol>
</section>
<section class="mb-8">
<h2 class="text-xl font-semibold mb-4 text-gray-900">III. Other Terms</h2>
<ol class="list-decimal pl-6 space-y-2 text-gray-700">
<li>Cherry Studio developers reserve the right of final interpretation of these agreement terms.</li>
<li>
This agreement may be updated according to practical circumstances, and users will be notified of updates
through this software.
</li>
</ol>
</section>
<p class="mt-8 text-gray-700">
Other than these specific conditions, all remaining rights and restrictions follow the Apache License 2.0. For
more detailed information regarding Apache License 2.0, please visit
<a href="http://www.apache.org/licenses/LICENSE-2.0" class="text-blue-600 hover:underline"
>http://www.apache.org/licenses/LICENSE-2.0</a
>
>.
</p>
</div>
</div>


@ -1,68 +0,0 @@
<head>
<style>
body {
margin: 0;
}
</style>
<script src="https://unpkg.com/3d-force-graph"></script>
</head>
<body>
<div id="3d-graph"></div>
<script src="./js/bridge.js"></script>
<script type="module">
import { getQueryParam } from './js/utils.js'
const apiUrl = getQueryParam('apiUrl')
const modelId = getQueryParam('modelId')
const jsonUrl = `${apiUrl}/v1/global_graph/${modelId}`
const infoCard = document.createElement('div')
infoCard.style.position = 'fixed'
infoCard.style.backgroundColor = 'rgba(255, 255, 255, 0.9)'
infoCard.style.padding = '8px'
infoCard.style.borderRadius = '4px'
infoCard.style.boxShadow = '0 2px 5px rgba(0,0,0,0.2)'
infoCard.style.fontSize = '12px'
infoCard.style.maxWidth = '200px'
infoCard.style.display = 'none'
infoCard.style.zIndex = '1000'
document.body.appendChild(infoCard)
document.addEventListener('mousemove', (event) => {
infoCard.style.left = `${event.clientX + 10}px`
infoCard.style.top = `${event.clientY + 10}px`
})
const elem = document.getElementById('3d-graph')
const Graph = ForceGraph3D()(elem)
.jsonUrl(jsonUrl)
.nodeAutoColorBy((node) => node.properties.type || 'default')
.nodeVal((node) => node.properties.degree)
.linkWidth((link) => link.properties.weight)
.onNodeHover((node) => {
if (node) {
infoCard.innerHTML = `
<div style="font-weight: bold; margin-bottom: 4px; color: #333;">
${node.properties.title}
</div>
<div style="color: #666;">
${node.properties.description}
</div>`
infoCard.style.display = 'block'
} else {
infoCard.style.display = 'none'
}
})
.onNodeClick((node) => {
const url = `${apiUrl}/v1/references/${modelId}/entities/${node.properties.human_readable_id}`
window.api.minApp({
url,
windowOptions: {
title: node.properties.title,
width: 500,
height: 800
}
})
})
</script>
</body>

View File

@ -6,8 +6,8 @@ const AdmZip = require('adm-zip')
const { downloadWithRedirects } = require('./download')
// Base URL for downloading bun binaries
const BUN_RELEASE_BASE_URL = 'https://github.com/oven-sh/bun/releases/download'
const DEFAULT_BUN_VERSION = '1.2.5' // Default fallback version
const BUN_RELEASE_BASE_URL = 'https://gitcode.com/CherryHQ/bun/releases/download'
const DEFAULT_BUN_VERSION = '1.2.9' // Default fallback version
// Mapping of platform+arch to binary package name
const BUN_PACKAGES = {

View File

@ -7,8 +7,8 @@ const AdmZip = require('adm-zip')
const { downloadWithRedirects } = require('./download')
// Base URL for downloading uv binaries
const UV_RELEASE_BASE_URL = 'https://github.com/astral-sh/uv/releases/download'
const DEFAULT_UV_VERSION = '0.6.6'
const UV_RELEASE_BASE_URL = 'https://gitcode.com/CherryHQ/uv/releases/download'
const DEFAULT_UV_VERSION = '0.6.14'
// Mapping of platform+arch to binary package name
const UV_PACKAGES = {

View File

@ -1,10 +1,8 @@
const { Arch } = require('electron-builder')
const { default: removeLocales } = require('./remove-locales')
const fs = require('fs')
const path = require('path')
exports.default = async function (context) {
await removeLocales(context)
const platform = context.packager.platform.name
const arch = context.arch
@ -18,28 +16,48 @@ exports.default = async function (context) {
'node_modules'
)
removeDifferentArchNodeFiles(node_modules_path, '@libsql', arch === Arch.arm64 ? ['darwin-arm64'] : ['darwin-x64'])
keepPackageNodeFiles(node_modules_path, '@libsql', arch === Arch.arm64 ? ['darwin-arm64'] : ['darwin-x64'])
}
if (platform === 'linux') {
const node_modules_path = path.join(context.appOutDir, 'resources', 'app.asar.unpacked', 'node_modules')
const _arch = arch === Arch.arm64 ? ['linux-arm64-gnu', 'linux-arm64-musl'] : ['linux-x64-gnu', 'linux-x64-musl']
removeDifferentArchNodeFiles(node_modules_path, '@libsql', _arch)
keepPackageNodeFiles(node_modules_path, '@libsql', _arch)
}
if (platform === 'windows') {
const node_modules_path = path.join(context.appOutDir, 'resources', 'app.asar.unpacked', 'node_modules')
removeDifferentArchNodeFiles(node_modules_path, '@libsql', ['win32-x64-msvc'])
if (arch === Arch.arm64) {
keepPackageNodeFiles(node_modules_path, '@strongtz', ['win32-arm64-msvc'])
keepPackageNodeFiles(node_modules_path, '@libsql', ['win32-arm64-msvc'])
}
if (arch === Arch.x64) {
keepPackageNodeFiles(node_modules_path, '@strongtz', ['win32-x64-msvc'])
keepPackageNodeFiles(node_modules_path, '@libsql', ['win32-x64-msvc'])
}
}
}
function removeDifferentArchNodeFiles(nodeModulesPath, packageName, arch) {
/**
* Keep only the node_modules files for the specified architectures
* @param {*} nodeModulesPath
* @param {*} packageName
* @param {*} arch
* @returns
*/
function keepPackageNodeFiles(nodeModulesPath, packageName, arch) {
const modulePath = path.join(nodeModulesPath, packageName)
if (!fs.existsSync(modulePath)) {
console.log(`[After Pack] Directory does not exist: ${modulePath}`)
return
}
const dirs = fs.readdirSync(modulePath)
dirs
.filter((dir) => !arch.includes(dir))
.forEach((dir) => {
fs.rmSync(path.join(modulePath, dir), { recursive: true, force: true })
console.log(`Removed dir: ${dir}`, arch)
console.log(`[After Pack] Removed dir: ${dir}`, arch)
})
}

View File

@ -0,0 +1,23 @@
const fs = require('fs')
exports.default = function (buildResult) {
try {
console.log('[artifact build completed] rename artifact file...')
if (!buildResult.file.includes(' ')) {
return
}
let oldFilePath = buildResult.file
if (oldFilePath.includes('-portable') && !oldFilePath.includes('-x64') && !oldFilePath.includes('-arm64')) {
console.log('[artifact build completed] delete portable file:', oldFilePath)
fs.unlinkSync(oldFilePath)
return
}
const newFilePath = oldFilePath.replace(/ /g, '-')
fs.renameSync(oldFilePath, newFilePath)
buildResult.file = newFilePath
console.log(`[artifact build completed] renamed file ${oldFilePath} to ${newFilePath}`)
} catch (error) {
console.error('Error renaming file:', error)
}
}

View File

@ -33,6 +33,10 @@ async function downloadNpm(platform) {
'@libsql/win32-x64-msvc',
'https://registry.npmjs.org/@libsql/win32-x64-msvc/-/win32-x64-msvc-0.4.7.tgz'
)
downloadNpmPackage(
'@strongtz/win32-arm64-msvc',
'https://registry.npmjs.org/@strongtz/win32-arm64-msvc/-/win32-arm64-msvc-0.4.7.tgz'
)
}
}

View File

@ -1,58 +0,0 @@
const fs = require('fs')
const path = require('path')
exports.default = async function (context) {
const platform = context.packager.platform.name
// Determine the locales directory locations based on the platform
let resourceDirs = []
if (platform === 'mac') {
// Locale file locations on macOS
resourceDirs = [
path.join(context.appOutDir, 'Cherry Studio.app', 'Contents', 'Resources'),
path.join(
context.appOutDir,
'Cherry Studio.app',
'Contents',
'Frameworks',
'Electron Framework.framework',
'Resources'
)
]
} else {
// Locale file locations on Windows and Linux
resourceDirs = [path.join(context.appOutDir, 'locales')]
}
// Process each resource directory
for (const resourceDir of resourceDirs) {
if (!fs.existsSync(resourceDir)) {
console.log(`Resource directory not found: ${resourceDir}, skipping...`)
continue
}
// Read all files and directories
const items = fs.readdirSync(resourceDir)
// Iterate over items and remove unneeded locale files
for (const item of items) {
if (platform === 'mac') {
// On macOS, check .lproj directories
if (item.endsWith('.lproj') && !item.match(/^(en|zh|ru)/)) {
const dirPath = path.join(resourceDir, item)
fs.rmSync(dirPath, { recursive: true, force: true })
console.log(`Removed locale directory: ${item} from ${resourceDir}`)
}
} else {
// On other platforms, handle .pak files
if (!item.match(/^(en|zh|ru)/)) {
const filePath = path.join(resourceDir, item)
fs.unlinkSync(filePath)
console.log(`Removed locale file: ${item} from ${resourceDir}`)
}
}
}
}
console.log('Locale cleanup completed!')
}

View File

@ -1,58 +0,0 @@
// replaceSpaces.js
const fs = require('fs')
const path = require('path')
const directory = 'dist'
// Replace spaces in file names
function replaceFileNames() {
fs.readdir(directory, (err, files) => {
if (err) throw err
files.forEach((file) => {
const oldPath = path.join(directory, file)
const newPath = path.join(directory, file.replace(/ /g, '-'))
fs.stat(oldPath, (err, stats) => {
if (err) throw err
if (stats.isFile() && oldPath !== newPath) {
fs.rename(oldPath, newPath, (err) => {
if (err) throw err
console.log(`Renamed: ${oldPath} -> ${newPath}`)
})
}
})
})
})
}
function replaceYmlContent() {
fs.readdir(directory, (err, files) => {
if (err) throw err
files.forEach((file) => {
if (path.extname(file).toLowerCase() === '.yml') {
const filePath = path.join(directory, file)
fs.readFile(filePath, 'utf8', (err, data) => {
if (err) throw err
// Replace the content
const newContent = data.replace(/Cherry Studio-/g, 'Cherry-Studio-')
// Write back to the file
fs.writeFile(filePath, newContent, 'utf8', (err) => {
if (err) throw err
console.log(`Updated content in: ${filePath}`)
})
})
}
})
})
}
// Run both operations
replaceFileNames()
replaceYmlContent()

View File

@ -22,7 +22,8 @@ function downloadNpmPackage(packageName, url) {
console.log(`Extracting ${filename}...`)
execSync(`tar -xvf ${filename}`)
execSync(`rm -rf ${filename}`)
execSync(`mv package ${targetDir}`)
execSync(`mkdir -p ${targetDir}`)
execSync(`mv package/* ${targetDir}/`)
} catch (error) {
console.error(`Error processing ${packageName}: ${error.message}`)
if (fs.existsSync(filename)) {

View File

@ -1,10 +1,14 @@
import { electronApp, optimizer } from '@electron-toolkit/utils'
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
import { IpcChannel } from '@shared/IpcChannel'
import { app, ipcMain } from 'electron'
import installExtension, { REDUX_DEVTOOLS } from 'electron-devtools-installer'
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
import Logger from 'electron-log'
import { initSentry } from './integration/sentry'
import { registerIpc } from './ipc'
import { configManager } from './services/ConfigManager'
import mcpService from './services/MCPService'
import { CHERRY_STUDIO_PROTOCOL, handleProtocolUrl, registerProtocolClient } from './services/ProtocolClient'
import { registerShortcuts } from './services/ShortcutService'
import { TrayService } from './services/TrayService'
@ -48,13 +52,17 @@ if (!app.requestSingleInstanceLock()) {
replaceDevtoolsFont(mainWindow)
if (process.env.NODE_ENV === 'development') {
installExtension(REDUX_DEVTOOLS)
installExtension([REDUX_DEVTOOLS, REACT_DEVELOPER_TOOLS])
.then((name) => console.log(`Added Extension: ${name}`))
.catch((err) => console.log('An error occurred: ', err))
}
ipcMain.handle('system:getDeviceType', () => {
ipcMain.handle(IpcChannel.System_GetDeviceType, () => {
return process.platform === 'darwin' ? 'mac' : process.platform === 'win32' ? 'windows' : 'linux'
})
ipcMain.handle(IpcChannel.System_GetHostname, () => {
return require('os').hostname()
})
})
registerProtocolClient(app)
@ -91,6 +99,17 @@ if (!app.requestSingleInstanceLock()) {
app.isQuitting = true
})
app.on('will-quit', async () => {
// event.preventDefault()
try {
await mcpService.cleanup()
} catch (error) {
Logger.error('Error cleaning up MCP service:', error)
}
})
// In this file you can include the rest of your app's specific main process
// code. You can also put them in separate files and require them here.
}
initSentry()

View File

@ -0,0 +1,14 @@
interface CreateOAuthUrlArgs {
app: string;
}
declare function createOAuthUrl({ app }: CreateOAuthUrlArgs): Promise<string>;
declare function _dont_use_in_prod_createOAuthUrl({ app, }: CreateOAuthUrlArgs): Promise<string>;
interface DecryptSecretArgs {
app: string;
s: string;
}
declare function decryptSecret({ app, s }: DecryptSecretArgs): Promise<string>;
declare function _dont_use_in_prod_decryptSecret({ app, s, }: DecryptSecretArgs): Promise<string>;
export { type CreateOAuthUrlArgs, type DecryptSecretArgs, _dont_use_in_prod_createOAuthUrl, _dont_use_in_prod_decryptSecret, createOAuthUrl, decryptSecret };

View File

@ -1,8 +0,0 @@
declare function decrypt(app: string, s: string): string;
interface Secret {
app: string;
}
declare function createOAuthUrl(secret: Secret): string;
export { type Secret, createOAuthUrl, decrypt };

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,12 @@
import { configManager } from '@main/services/ConfigManager'
import * as Sentry from '@sentry/electron/main'
import { app } from 'electron'
export function initSentry() {
if (configManager.getEnableDataCollection()) {
Sentry.init({
dsn: 'https://194ceab3bd44e686bd3ebda9de3c20fd@o4509184559218688.ingest.us.sentry.io/4509184569442304',
environment: app.isPackaged ? 'production' : 'development'
})
}
}

View File

@ -1,12 +1,15 @@
import fs from 'node:fs'
import { arch } from 'node:os'
import { isMac, isWin } from '@main/constant'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { IpcChannel } from '@shared/IpcChannel'
import { Shortcut, ThemeMode } from '@types'
import { BrowserWindow, ipcMain, session, shell } from 'electron'
import log from 'electron-log'
import { titleBarOverlayDark, titleBarOverlayLight } from './config'
import { initSentry } from './integration/sentry'
import AppUpdater from './services/AppUpdater'
import BackupManager from './services/BackupManager'
import { configManager } from './services/ConfigManager'
@ -20,12 +23,13 @@ import mcpService from './services/MCPService'
import * as NutstoreService from './services/NutstoreService'
import ObsidianVaultService from './services/ObsidianVaultService'
import { ProxyConfig, proxyManager } from './services/ProxyManager'
import { searchService } from './services/SearchService'
import { registerShortcuts, unregisterAllShortcuts } from './services/ShortcutService'
import { TrayService } from './services/TrayService'
import { windowService } from './services/WindowService'
import { getResourcePath } from './utils'
import { decrypt, encrypt } from './utils/aes'
import { getFilesDir } from './utils/file'
import { getConfigDir, getFilesDir } from './utils/file'
import { compress, decompress } from './utils/zip'
const fileManager = new FileStorage()
@ -36,17 +40,20 @@ const obsidianVaultService = new ObsidianVaultService()
export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
const appUpdater = new AppUpdater(mainWindow)
ipcMain.handle('app:info', () => ({
ipcMain.handle(IpcChannel.App_Info, () => ({
version: app.getVersion(),
isPackaged: app.isPackaged,
appPath: app.getAppPath(),
filesPath: getFilesDir(),
configPath: getConfigDir(),
appDataPath: app.getPath('userData'),
resourcesPath: getResourcePath(),
logsPath: log.transports.file.getFile().path
logsPath: log.transports.file.getFile().path,
arch: arch(),
isPortable: isWin && 'PORTABLE_EXECUTABLE_DIR' in process.env
}))
ipcMain.handle('app:proxy', async (_, proxy: string) => {
ipcMain.handle(IpcChannel.App_Proxy, async (_, proxy: string) => {
let proxyConfig: ProxyConfig
if (proxy === 'system') {
@ -60,19 +67,19 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
await proxyManager.configureProxy(proxyConfig)
})
ipcMain.handle('app:reload', () => mainWindow.reload())
ipcMain.handle('open:website', (_, url: string) => shell.openExternal(url))
ipcMain.handle(IpcChannel.App_Reload, () => mainWindow.reload())
ipcMain.handle(IpcChannel.Open_Website, (_, url: string) => shell.openExternal(url))
// Update
ipcMain.handle('app:show-update-dialog', () => appUpdater.showUpdateDialog(mainWindow))
ipcMain.handle(IpcChannel.App_ShowUpdateDialog, () => appUpdater.showUpdateDialog(mainWindow))
// language
ipcMain.handle('app:set-language', (_, language) => {
ipcMain.handle(IpcChannel.App_SetLanguage, (_, language) => {
configManager.setLanguage(language)
})
// launch on boot
ipcMain.handle('app:set-launch-on-boot', (_, openAtLogin: boolean) => {
ipcMain.handle(IpcChannel.App_SetLaunchOnBoot, (_, openAtLogin: boolean) => {
// Set login item settings for windows and mac
// linux is not supported because it requires more file operations
if (isWin || isMac) {
@ -81,32 +88,38 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// launch to tray
ipcMain.handle('app:set-launch-to-tray', (_, isActive: boolean) => {
ipcMain.handle(IpcChannel.App_SetLaunchToTray, (_, isActive: boolean) => {
configManager.setLaunchToTray(isActive)
})
// tray
ipcMain.handle('app:set-tray', (_, isActive: boolean) => {
ipcMain.handle(IpcChannel.App_SetTray, (_, isActive: boolean) => {
configManager.setTray(isActive)
})
// to tray on close
ipcMain.handle('app:set-tray-on-close', (_, isActive: boolean) => {
ipcMain.handle(IpcChannel.App_SetTrayOnClose, (_, isActive: boolean) => {
configManager.setTrayOnClose(isActive)
})
ipcMain.handle('app:restart-tray', () => TrayService.getInstance().restartTray())
// auto update
ipcMain.handle(IpcChannel.App_SetAutoUpdate, (_, isActive: boolean) => {
appUpdater.setAutoUpdate(isActive)
configManager.setAutoUpdate(isActive)
})
ipcMain.handle('config:set', (_, key: string, value: any) => {
ipcMain.handle(IpcChannel.App_RestartTray, () => TrayService.getInstance().restartTray())
ipcMain.handle(IpcChannel.Config_Set, (_, key: string, value: any) => {
configManager.set(key, value)
})
ipcMain.handle('config:get', (_, key: string) => {
ipcMain.handle(IpcChannel.Config_Get, (_, key: string) => {
return configManager.get(key)
})
// theme
ipcMain.handle('app:set-theme', (event, theme: ThemeMode) => {
ipcMain.handle(IpcChannel.App_SetTheme, (event, theme: ThemeMode) => {
if (theme === configManager.getTheme()) return
configManager.setTheme(theme)
@ -117,7 +130,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
// Broadcast the theme change to other windows
windows.forEach((win) => {
if (win.webContents.id !== senderWindowId) {
win.webContents.send('theme:change', theme)
win.webContents.send(IpcChannel.ThemeChange, theme)
}
})
@ -125,8 +138,24 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
mainWindow.setTitleBarOverlay(theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight)
})
// custom css
ipcMain.handle(IpcChannel.App_SetCustomCss, (event, css: string) => {
if (css === configManager.getCustomCss()) return
configManager.setCustomCss(css)
// Broadcast to all windows including the mini window
const senderWindowId = event.sender.id
const windows = BrowserWindow.getAllWindows()
// Broadcast the custom CSS update to other windows
windows.forEach((win) => {
if (win.webContents.id !== senderWindowId) {
win.webContents.send('custom-css:update', css)
}
})
})
// clear cache
ipcMain.handle('app:clear-cache', async () => {
ipcMain.handle(IpcChannel.App_ClearCache, async () => {
const sessions = [session.defaultSession, session.fromPartition('persist:webview')]
try {
@ -148,8 +177,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// check for update
ipcMain.handle('app:check-for-update', async () => {
ipcMain.handle(IpcChannel.App_CheckForUpdate, async () => {
// On Windows, skip update checks for arm64 and portable builds
if (isWin && (arch().includes('arm') || 'PORTABLE_EXECUTABLE_DIR' in process.env)) {
return {
currentVersion: app.getVersion(),
updateInfo: null
}
}
const update = await appUpdater.autoUpdater.checkForUpdates()
return {
currentVersion: appUpdater.autoUpdater.currentVersion,
updateInfo: update?.updateInfo
@ -157,62 +195,51 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// zip
ipcMain.handle('zip:compress', (_, text: string) => compress(text))
ipcMain.handle('zip:decompress', (_, text: Buffer) => decompress(text))
ipcMain.handle(IpcChannel.Zip_Compress, (_, text: string) => compress(text))
ipcMain.handle(IpcChannel.Zip_Decompress, (_, text: Buffer) => decompress(text))
// backup
ipcMain.handle('backup:backup', backupManager.backup)
ipcMain.handle('backup:restore', backupManager.restore)
ipcMain.handle('backup:backupToWebdav', backupManager.backupToWebdav)
ipcMain.handle('backup:restoreFromWebdav', backupManager.restoreFromWebdav)
ipcMain.handle('backup:listWebdavFiles', backupManager.listWebdavFiles)
ipcMain.handle('backup:checkConnection', backupManager.checkConnection)
ipcMain.handle('backup:createDirectory', backupManager.createDirectory)
ipcMain.handle(IpcChannel.Backup_Backup, backupManager.backup)
ipcMain.handle(IpcChannel.Backup_Restore, backupManager.restore)
ipcMain.handle(IpcChannel.Backup_BackupToWebdav, backupManager.backupToWebdav)
ipcMain.handle(IpcChannel.Backup_RestoreFromWebdav, backupManager.restoreFromWebdav)
ipcMain.handle(IpcChannel.Backup_ListWebdavFiles, backupManager.listWebdavFiles)
ipcMain.handle(IpcChannel.Backup_CheckConnection, backupManager.checkConnection)
ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory)
ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile)
// file
ipcMain.handle('file:open', fileManager.open)
ipcMain.handle('file:openPath', fileManager.openPath)
ipcMain.handle('file:save', fileManager.save)
ipcMain.handle('file:select', fileManager.selectFile)
ipcMain.handle('file:upload', fileManager.uploadFile)
ipcMain.handle('file:clear', fileManager.clear)
ipcMain.handle('file:read', fileManager.readFile)
ipcMain.handle('file:delete', fileManager.deleteFile)
ipcMain.handle('file:get', fileManager.getFile)
ipcMain.handle('file:selectFolder', fileManager.selectFolder)
ipcMain.handle('file:create', fileManager.createTempFile)
ipcMain.handle('file:write', fileManager.writeFile)
ipcMain.handle('file:saveImage', fileManager.saveImage)
ipcMain.handle('file:base64Image', fileManager.base64Image)
ipcMain.handle('file:download', fileManager.downloadFile)
ipcMain.handle('file:copy', fileManager.copyFile)
ipcMain.handle('file:binaryFile', fileManager.binaryFile)
ipcMain.handle(IpcChannel.File_Open, fileManager.open)
ipcMain.handle(IpcChannel.File_OpenPath, fileManager.openPath)
ipcMain.handle(IpcChannel.File_Save, fileManager.save)
ipcMain.handle(IpcChannel.File_Select, fileManager.selectFile)
ipcMain.handle(IpcChannel.File_Upload, fileManager.uploadFile)
ipcMain.handle(IpcChannel.File_Clear, fileManager.clear)
ipcMain.handle(IpcChannel.File_Read, fileManager.readFile)
ipcMain.handle(IpcChannel.File_Delete, fileManager.deleteFile)
ipcMain.handle(IpcChannel.File_Get, fileManager.getFile)
ipcMain.handle(IpcChannel.File_SelectFolder, fileManager.selectFolder)
ipcMain.handle(IpcChannel.File_Create, fileManager.createTempFile)
ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile)
ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage)
ipcMain.handle(IpcChannel.File_Base64Image, fileManager.base64Image)
ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile)
ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile)
ipcMain.handle(IpcChannel.File_BinaryFile, fileManager.binaryFile)
// fs
ipcMain.handle('fs:read', FileService.readFile)
// minapp
ipcMain.handle('minapp', (_, args) => {
windowService.createMinappWindow({
url: args.url,
parent: mainWindow,
windowOptions: {
...mainWindow.getBounds(),
...args.windowOptions
}
})
})
ipcMain.handle(IpcChannel.Fs_Read, FileService.readFile)
// export
ipcMain.handle('export:word', exportService.exportToWord)
ipcMain.handle(IpcChannel.Export_Word, exportService.exportToWord)
// open path
ipcMain.handle('open:path', async (_, path: string) => {
ipcMain.handle(IpcChannel.Open_Path, async (_, path: string) => {
await shell.openPath(path)
})
// shortcuts
ipcMain.handle('shortcuts:update', (_, shortcuts: Shortcut[]) => {
ipcMain.handle(IpcChannel.Shortcuts_Update, (_, shortcuts: Shortcut[]) => {
configManager.setShortcuts(shortcuts)
// Refresh shortcuts registration
if (mainWindow) {
@ -222,20 +249,20 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// knowledge base
ipcMain.handle('knowledge-base:create', KnowledgeService.create)
ipcMain.handle('knowledge-base:reset', KnowledgeService.reset)
ipcMain.handle('knowledge-base:delete', KnowledgeService.delete)
ipcMain.handle('knowledge-base:add', KnowledgeService.add)
ipcMain.handle('knowledge-base:remove', KnowledgeService.remove)
ipcMain.handle('knowledge-base:search', KnowledgeService.search)
ipcMain.handle('knowledge-base:rerank', KnowledgeService.rerank)
ipcMain.handle(IpcChannel.KnowledgeBase_Create, KnowledgeService.create)
ipcMain.handle(IpcChannel.KnowledgeBase_Reset, KnowledgeService.reset)
ipcMain.handle(IpcChannel.KnowledgeBase_Delete, KnowledgeService.delete)
ipcMain.handle(IpcChannel.KnowledgeBase_Add, KnowledgeService.add)
ipcMain.handle(IpcChannel.KnowledgeBase_Remove, KnowledgeService.remove)
ipcMain.handle(IpcChannel.KnowledgeBase_Search, KnowledgeService.search)
ipcMain.handle(IpcChannel.KnowledgeBase_Rerank, KnowledgeService.rerank)
// window
ipcMain.handle('window:set-minimum-size', (_, width: number, height: number) => {
ipcMain.handle(IpcChannel.Windows_SetMinimumSize, (_, width: number, height: number) => {
mainWindow?.setMinimumSize(width, height)
})
ipcMain.handle('window:reset-minimum-size', () => {
ipcMain.handle(IpcChannel.Windows_ResetMinimumSize, () => {
mainWindow?.setMinimumSize(1080, 600)
const [width, height] = mainWindow?.getSize() ?? [1080, 600]
if (width < 1080) {
@ -244,58 +271,79 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// gemini
ipcMain.handle('gemini:upload-file', GeminiService.uploadFile)
ipcMain.handle('gemini:base64-file', GeminiService.base64File)
ipcMain.handle('gemini:retrieve-file', GeminiService.retrieveFile)
ipcMain.handle('gemini:list-files', GeminiService.listFiles)
ipcMain.handle('gemini:delete-file', GeminiService.deleteFile)
ipcMain.handle(IpcChannel.Gemini_UploadFile, GeminiService.uploadFile)
ipcMain.handle(IpcChannel.Gemini_Base64File, GeminiService.base64File)
ipcMain.handle(IpcChannel.Gemini_RetrieveFile, GeminiService.retrieveFile)
ipcMain.handle(IpcChannel.Gemini_ListFiles, GeminiService.listFiles)
ipcMain.handle(IpcChannel.Gemini_DeleteFile, GeminiService.deleteFile)
// mini window
ipcMain.handle('miniwindow:show', () => windowService.showMiniWindow())
ipcMain.handle('miniwindow:hide', () => windowService.hideMiniWindow())
ipcMain.handle('miniwindow:close', () => windowService.closeMiniWindow())
ipcMain.handle('miniwindow:toggle', () => windowService.toggleMiniWindow())
ipcMain.handle(IpcChannel.MiniWindow_Show, () => windowService.showMiniWindow())
ipcMain.handle(IpcChannel.MiniWindow_Hide, () => windowService.hideMiniWindow())
ipcMain.handle(IpcChannel.MiniWindow_Close, () => windowService.closeMiniWindow())
ipcMain.handle(IpcChannel.MiniWindow_Toggle, () => windowService.toggleMiniWindow())
ipcMain.handle(IpcChannel.MiniWindow_SetPin, (_, isPinned) => windowService.setPinMiniWindow(isPinned))
// aes
ipcMain.handle('aes:encrypt', (_, text: string, secretKey: string, iv: string) => encrypt(text, secretKey, iv))
ipcMain.handle('aes:decrypt', (_, encryptedData: string, iv: string, secretKey: string) =>
ipcMain.handle(IpcChannel.Aes_Encrypt, (_, text: string, secretKey: string, iv: string) =>
encrypt(text, secretKey, iv)
)
ipcMain.handle(IpcChannel.Aes_Decrypt, (_, encryptedData: string, iv: string, secretKey: string) =>
decrypt(encryptedData, iv, secretKey)
)
// Register MCP handlers
ipcMain.handle('mcp:remove-server', mcpService.removeServer)
ipcMain.handle('mcp:restart-server', mcpService.restartServer)
ipcMain.handle('mcp:stop-server', mcpService.stopServer)
ipcMain.handle('mcp:list-tools', mcpService.listTools)
ipcMain.handle('mcp:call-tool', mcpService.callTool)
ipcMain.handle('mcp:get-install-info', mcpService.getInstallInfo)
ipcMain.handle(IpcChannel.Mcp_RemoveServer, mcpService.removeServer)
ipcMain.handle(IpcChannel.Mcp_RestartServer, mcpService.restartServer)
ipcMain.handle(IpcChannel.Mcp_StopServer, mcpService.stopServer)
ipcMain.handle(IpcChannel.Mcp_ListTools, mcpService.listTools)
ipcMain.handle(IpcChannel.Mcp_CallTool, mcpService.callTool)
ipcMain.handle(IpcChannel.Mcp_ListPrompts, mcpService.listPrompts)
ipcMain.handle(IpcChannel.Mcp_GetPrompt, mcpService.getPrompt)
ipcMain.handle(IpcChannel.Mcp_ListResources, mcpService.listResources)
ipcMain.handle(IpcChannel.Mcp_GetResource, mcpService.getResource)
ipcMain.handle(IpcChannel.Mcp_GetInstallInfo, mcpService.getInstallInfo)
ipcMain.handle('app:is-binary-exist', (_, name: string) => isBinaryExists(name))
ipcMain.handle('app:get-binary-path', (_, name: string) => getBinaryPath(name))
ipcMain.handle('app:install-uv-binary', () => runInstallScript('install-uv.js'))
ipcMain.handle('app:install-bun-binary', () => runInstallScript('install-bun.js'))
ipcMain.handle(IpcChannel.App_IsBinaryExist, (_, name: string) => isBinaryExists(name))
ipcMain.handle(IpcChannel.App_GetBinaryPath, (_, name: string) => getBinaryPath(name))
ipcMain.handle(IpcChannel.App_InstallUvBinary, () => runInstallScript('install-uv.js'))
ipcMain.handle(IpcChannel.App_InstallBunBinary, () => runInstallScript('install-bun.js'))
//copilot
ipcMain.handle('copilot:get-auth-message', CopilotService.getAuthMessage)
ipcMain.handle('copilot:get-copilot-token', CopilotService.getCopilotToken)
ipcMain.handle('copilot:save-copilot-token', CopilotService.saveCopilotToken)
ipcMain.handle('copilot:get-token', CopilotService.getToken)
ipcMain.handle('copilot:logout', CopilotService.logout)
ipcMain.handle('copilot:get-user', CopilotService.getUser)
ipcMain.handle(IpcChannel.Copilot_GetAuthMessage, CopilotService.getAuthMessage)
ipcMain.handle(IpcChannel.Copilot_GetCopilotToken, CopilotService.getCopilotToken)
ipcMain.handle(IpcChannel.Copilot_SaveCopilotToken, CopilotService.saveCopilotToken)
ipcMain.handle(IpcChannel.Copilot_GetToken, CopilotService.getToken)
ipcMain.handle(IpcChannel.Copilot_Logout, CopilotService.logout)
ipcMain.handle(IpcChannel.Copilot_GetUser, CopilotService.getUser)
// Obsidian service
ipcMain.handle('obsidian:get-vaults', () => {
ipcMain.handle(IpcChannel.Obsidian_GetVaults, () => {
return obsidianVaultService.getVaults()
})
ipcMain.handle('obsidian:get-files', (_event, vaultName) => {
ipcMain.handle(IpcChannel.Obsidian_GetFiles, (_event, vaultName) => {
return obsidianVaultService.getFilesByVaultName(vaultName)
})
// nutstore
ipcMain.handle('nutstore:get-sso-url', NutstoreService.getNutstoreSSOUrl)
ipcMain.handle('nutstore:decrypt-token', (_, token: string) => NutstoreService.decryptToken(token))
ipcMain.handle('nutstore:get-directory-contents', (_, token: string, path: string) =>
ipcMain.handle(IpcChannel.Nutstore_GetSsoUrl, NutstoreService.getNutstoreSSOUrl)
ipcMain.handle(IpcChannel.Nutstore_DecryptToken, (_, token: string) => NutstoreService.decryptToken(token))
ipcMain.handle(IpcChannel.Nutstore_GetDirectoryContents, (_, token: string, path: string) =>
NutstoreService.getDirectoryContents(token, path)
)
// search window
ipcMain.handle(IpcChannel.SearchWindow_Open, async (_, uid: string) => {
await searchService.openSearchWindow(uid)
})
ipcMain.handle(IpcChannel.SearchWindow_Close, async (_, uid: string) => {
await searchService.closeSearchWindow(uid)
})
ipcMain.handle(IpcChannel.SearchWindow_OpenUrl, async (_, uid: string, url: string) => {
return await searchService.openUrlInSearchWindow(uid, url)
})
// sentry
ipcMain.handle(IpcChannel.Sentry_Init, () => initSentry())
}
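// Example (not part of this commit): a minimal sketch of how a renderer could call the
// typed channels registered above, assuming a preload script bridges ipcRenderer.invoke
// to a `window.api` object (the exact bridge shape is an assumption).
//
// preload.ts
// import { contextBridge, ipcRenderer } from 'electron'
// import { IpcChannel } from '@shared/IpcChannel'
// contextBridge.exposeInMainWorld('api', {
//   getAppInfo: () => ipcRenderer.invoke(IpcChannel.App_Info),
//   setTheme: (theme: string) => ipcRenderer.invoke(IpcChannel.App_SetTheme, theme)
// })
//
// renderer
// const info = await window.api.getAppInfo()
// console.log(info.version, info.arch, info.isPortable)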

View File

@ -0,0 +1,374 @@
// Brave Search MCP Server
// port https://github.com/modelcontextprotocol/servers/blob/main/src/brave-search/index.ts
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema, Tool } from '@modelcontextprotocol/sdk/types.js'
const WEB_SEARCH_TOOL: Tool = {
name: 'brave_web_search',
description:
'Performs a web search using the Brave Search API, ideal for general queries, news, articles, and online content. ' +
'Use this for broad information gathering, recent events, or when you need diverse web sources. ' +
'Supports pagination, content filtering, and freshness controls. ' +
'Maximum 20 results per request, with offset for pagination. ',
inputSchema: {
type: 'object',
properties: {
query: {
type: 'string',
description: 'Search query (max 400 chars, 50 words)'
},
count: {
type: 'number',
description: 'Number of results (1-20, default 10)',
default: 10
},
offset: {
type: 'number',
description: 'Pagination offset (max 9, default 0)',
default: 0
}
},
required: ['query']
}
}
const LOCAL_SEARCH_TOOL: Tool = {
name: 'brave_local_search',
description:
"Searches for local businesses and places using Brave's Local Search API. " +
'Best for queries related to physical locations, businesses, restaurants, services, etc. ' +
'Returns detailed information including:\n' +
'- Business names and addresses\n' +
'- Ratings and review counts\n' +
'- Phone numbers and opening hours\n' +
"Use this when the query implies 'near me' or mentions specific locations. " +
'Automatically falls back to web search if no local results are found.',
inputSchema: {
type: 'object',
properties: {
query: {
type: 'string',
description: "Local search query (e.g. 'pizza near Central Park')"
},
count: {
type: 'number',
description: 'Number of results (1-20, default 5)',
default: 5
}
},
required: ['query']
}
}
const RATE_LIMIT = {
perSecond: 1,
perMonth: 15000
}
const requestCount = {
second: 0,
month: 0,
lastReset: Date.now()
}
function checkRateLimit() {
const now = Date.now()
if (now - requestCount.lastReset > 1000) {
requestCount.second = 0
requestCount.lastReset = now
}
if (requestCount.second >= RATE_LIMIT.perSecond || requestCount.month >= RATE_LIMIT.perMonth) {
throw new Error('Rate limit exceeded')
}
requestCount.second++
requestCount.month++
}
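// Note: only the per-second counter is reset above; requestCount.month is never reset,
// so the monthly cap is effectively a cumulative limit for the lifetime of the process.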
interface BraveWeb {
web?: {
results?: Array<{
title: string
description: string
url: string
language?: string
published?: string
rank?: number
}>
}
locations?: {
results?: Array<{
id: string // Required by API
title?: string
}>
}
}
interface BraveLocation {
id: string
name: string
address: {
streetAddress?: string
addressLocality?: string
addressRegion?: string
postalCode?: string
}
coordinates?: {
latitude: number
longitude: number
}
phone?: string
rating?: {
ratingValue?: number
ratingCount?: number
}
openingHours?: string[]
priceRange?: string
}
interface BravePoiResponse {
results: BraveLocation[]
}
interface BraveDescription {
descriptions: { [id: string]: string }
}
function isBraveWebSearchArgs(args: unknown): args is { query: string; count?: number } {
return (
typeof args === 'object' &&
args !== null &&
'query' in args &&
typeof (args as { query: string }).query === 'string'
)
}
function isBraveLocalSearchArgs(args: unknown): args is { query: string; count?: number } {
return (
typeof args === 'object' &&
args !== null &&
'query' in args &&
typeof (args as { query: string }).query === 'string'
)
}
async function performWebSearch(apiKey: string, query: string, count: number = 10, offset: number = 0) {
checkRateLimit()
const url = new URL('https://api.search.brave.com/res/v1/web/search')
url.searchParams.set('q', query)
url.searchParams.set('count', Math.min(count, 20).toString()) // API limit
url.searchParams.set('offset', offset.toString())
const response = await fetch(url, {
headers: {
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': apiKey
}
})
if (!response.ok) {
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
}
const data = (await response.json()) as BraveWeb
// Extract just web results
const results = (data.web?.results || []).map((result) => ({
title: result.title || '',
description: result.description || '',
url: result.url || ''
}))
return results.map((r) => `Title: ${r.title}\nDescription: ${r.description}\nURL: ${r.url}`).join('\n\n')
}
async function performLocalSearch(apiKey: string, query: string, count: number = 5) {
checkRateLimit()
// Initial search to get location IDs
const webUrl = new URL('https://api.search.brave.com/res/v1/web/search')
webUrl.searchParams.set('q', query)
webUrl.searchParams.set('search_lang', 'en')
webUrl.searchParams.set('result_filter', 'locations')
webUrl.searchParams.set('count', Math.min(count, 20).toString())
const webResponse = await fetch(webUrl, {
headers: {
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': apiKey
}
})
if (!webResponse.ok) {
throw new Error(`Brave API error: ${webResponse.status} ${webResponse.statusText}\n${await webResponse.text()}`)
}
const webData = (await webResponse.json()) as BraveWeb
const locationIds =
webData.locations?.results?.filter((r): r is { id: string; title?: string } => r.id != null).map((r) => r.id) || []
if (locationIds.length === 0) {
return performWebSearch(apiKey, query, count) // Fallback to web search
}
// Get POI details and descriptions in parallel
const [poisData, descriptionsData] = await Promise.all([
getPoisData(apiKey, locationIds),
getDescriptionsData(apiKey, locationIds)
])
return formatLocalResults(poisData, descriptionsData)
}
async function getPoisData(apiKey: string, ids: string[]): Promise<BravePoiResponse> {
checkRateLimit()
const url = new URL('https://api.search.brave.com/res/v1/local/pois')
ids.filter(Boolean).forEach((id) => url.searchParams.append('ids', id))
const response = await fetch(url, {
headers: {
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': apiKey
}
})
if (!response.ok) {
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
}
const poisResponse = (await response.json()) as BravePoiResponse
return poisResponse
}
async function getDescriptionsData(apiKey: string, ids: string[]): Promise<BraveDescription> {
checkRateLimit()
const url = new URL('https://api.search.brave.com/res/v1/local/descriptions')
ids.filter(Boolean).forEach((id) => url.searchParams.append('ids', id))
const response = await fetch(url, {
headers: {
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': apiKey
}
})
if (!response.ok) {
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
}
const descriptionsData = (await response.json()) as BraveDescription
return descriptionsData
}
function formatLocalResults(poisData: BravePoiResponse, descData: BraveDescription): string {
return (
(poisData.results || [])
.map((poi) => {
const address =
[
poi.address?.streetAddress ?? '',
poi.address?.addressLocality ?? '',
poi.address?.addressRegion ?? '',
poi.address?.postalCode ?? ''
]
.filter((part) => part !== '')
.join(', ') || 'N/A'
return `Name: ${poi.name}
Address: ${address}
Phone: ${poi.phone || 'N/A'}
Rating: ${poi.rating?.ratingValue ?? 'N/A'} (${poi.rating?.ratingCount ?? 0} reviews)
Price Range: ${poi.priceRange || 'N/A'}
Hours: ${(poi.openingHours || []).join(', ') || 'N/A'}
Description: ${descData.descriptions[poi.id] || 'No description available'}
`
})
.join('\n---\n') || 'No local results found'
)
}
class BraveSearchServer {
public server: Server
private apiKey: string
constructor(apiKey: string) {
if (!apiKey) {
throw new Error('BRAVE_API_KEY is required for Brave Search MCP server')
}
this.apiKey = apiKey
this.server = new Server(
{
name: 'brave-search-server',
version: '0.1.0'
},
{
capabilities: {
tools: {}
}
}
)
this.initialize()
}
initialize() {
// Tool handlers
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [WEB_SEARCH_TOOL, LOCAL_SEARCH_TOOL]
}))
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
const { name, arguments: args } = request.params
if (!args) {
throw new Error('No arguments provided')
}
switch (name) {
case 'brave_web_search': {
if (!isBraveWebSearchArgs(args)) {
throw new Error('Invalid arguments for brave_web_search')
}
const { query, count = 10 } = args
const results = await performWebSearch(this.apiKey, query, count)
return {
content: [{ type: 'text', text: results }],
isError: false
}
}
case 'brave_local_search': {
if (!isBraveLocalSearchArgs(args)) {
throw new Error('Invalid arguments for brave_local_search')
}
const { query, count = 5 } = args
const results = await performLocalSearch(this.apiKey, query, count)
return {
content: [{ type: 'text', text: results }],
isError: false
}
}
default:
return {
content: [{ type: 'text', text: `Unknown tool: ${name}` }],
isError: true
}
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Error: ${error instanceof Error ? error.message : String(error)}`
}
],
isError: true
}
}
})
}
}
export default BraveSearchServer

View File

@ -0,0 +1,32 @@
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import Logger from 'electron-log'
import BraveSearchServer from './brave-search'
import FetchServer from './fetch'
import FileSystemServer from './filesystem'
import MemoryServer from './memory'
import ThinkingServer from './sequentialthinking'
export function createInMemoryMCPServer(name: string, args: string[] = [], envs: Record<string, string> = {}): Server {
Logger.info(`[MCP] Creating in-memory MCP server: ${name} with args: ${args} and envs: ${JSON.stringify(envs)}`)
switch (name) {
case '@cherry/memory': {
const envPath = envs.MEMORY_FILE_PATH
return new MemoryServer(envPath).server
}
case '@cherry/sequentialthinking': {
return new ThinkingServer().server
}
case '@cherry/brave-search': {
return new BraveSearchServer(envs.BRAVE_API_KEY).server
}
case '@cherry/fetch': {
return new FetchServer().server
}
case '@cherry/filesystem': {
return new FileSystemServer(args).server
}
default:
throw new Error(`Unknown in-memory MCP server: ${name}`)
}
}
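// Example (not part of this commit): a minimal sketch of wiring one of these in-memory
// servers to an MCP client over a linked in-memory transport. Module paths and method
// names follow the @modelcontextprotocol/sdk; treat the exact setup as an assumption.
//
// import { Client } from '@modelcontextprotocol/sdk/client/index.js'
// import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js'
//
// const server = createInMemoryMCPServer('@cherry/fetch')
// const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
// const client = new Client({ name: 'cherry-studio', version: '1.0.0' }, { capabilities: {} })
// await Promise.all([client.connect(clientTransport), server.connect(serverTransport)])
// const { tools } = await client.listTools()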

View File

@ -0,0 +1,236 @@
// port https://github.com/zcaceres/fetch-mcp/blob/main/src/index.ts
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
import { JSDOM } from 'jsdom'
import TurndownService from 'turndown'
import { z } from 'zod'
export const RequestPayloadSchema = z.object({
url: z.string().url(),
headers: z.record(z.string()).optional()
})
export type RequestPayload = z.infer<typeof RequestPayloadSchema>
export class Fetcher {
private static async _fetch({ url, headers }: RequestPayload): Promise<Response> {
try {
const response = await fetch(url, {
headers: {
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
...headers
}
})
if (!response.ok) {
throw new Error(`HTTP error: ${response.status}`)
}
return response
} catch (e: unknown) {
if (e instanceof Error) {
throw new Error(`Failed to fetch ${url}: ${e.message}`)
} else {
throw new Error(`Failed to fetch ${url}: Unknown error`)
}
}
}
static async html(requestPayload: RequestPayload) {
try {
const response = await this._fetch(requestPayload)
const html = await response.text()
return { content: [{ type: 'text', text: html }], isError: false }
} catch (error) {
return {
content: [{ type: 'text', text: (error as Error).message }],
isError: true
}
}
}
static async json(requestPayload: RequestPayload) {
try {
const response = await this._fetch(requestPayload)
const json = await response.json()
return {
content: [{ type: 'text', text: JSON.stringify(json) }],
isError: false
}
} catch (error) {
return {
content: [{ type: 'text', text: (error as Error).message }],
isError: true
}
}
}
static async txt(requestPayload: RequestPayload) {
try {
const response = await this._fetch(requestPayload)
const html = await response.text()
const dom = new JSDOM(html)
const document = dom.window.document
const scripts = document.getElementsByTagName('script')
const styles = document.getElementsByTagName('style')
Array.from(scripts).forEach((script: any) => script.remove())
Array.from(styles).forEach((style: any) => style.remove())
const text = document.body.textContent || ''
const normalizedText = text.replace(/\s+/g, ' ').trim()
return {
content: [{ type: 'text', text: normalizedText }],
isError: false
}
} catch (error) {
return {
content: [{ type: 'text', text: (error as Error).message }],
isError: true
}
}
}
static async markdown(requestPayload: RequestPayload) {
try {
const response = await this._fetch(requestPayload)
const html = await response.text()
const turndownService = new TurndownService()
const markdown = turndownService.turndown(html)
return { content: [{ type: 'text', text: markdown }], isError: false }
} catch (error) {
return {
content: [{ type: 'text', text: (error as Error).message }],
isError: true
}
}
}
}
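// Example (not part of this commit): the static helpers above can be called directly;
// the URL below is illustrative only.
// const result = await Fetcher.markdown({ url: 'https://example.com' })
// if (!result.isError) console.log(result.content[0].text)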
const server = new Server(
{
name: 'zcaceres/fetch',
version: '0.1.0'
},
{
capabilities: {
resources: {},
tools: {}
}
}
)
server.setRequestHandler(ListToolsRequestSchema, async () => {
return {
tools: [
{
name: 'fetch_html',
description: 'Fetch a website and return the content as HTML',
inputSchema: {
type: 'object',
properties: {
url: {
type: 'string',
description: 'URL of the website to fetch'
},
headers: {
type: 'object',
description: 'Optional headers to include in the request'
}
},
required: ['url']
}
},
{
name: 'fetch_markdown',
description: 'Fetch a website and return the content as Markdown',
inputSchema: {
type: 'object',
properties: {
url: {
type: 'string',
description: 'URL of the website to fetch'
},
headers: {
type: 'object',
description: 'Optional headers to include in the request'
}
},
required: ['url']
}
},
{
name: 'fetch_txt',
description: 'Fetch a website, return the content as plain text (no HTML)',
inputSchema: {
type: 'object',
properties: {
url: {
type: 'string',
description: 'URL of the website to fetch'
},
headers: {
type: 'object',
description: 'Optional headers to include in the request'
}
},
required: ['url']
}
},
{
name: 'fetch_json',
description: 'Fetch a JSON file from a URL',
inputSchema: {
type: 'object',
properties: {
url: {
type: 'string',
description: 'URL of the JSON to fetch'
},
headers: {
type: 'object',
description: 'Optional headers to include in the request'
}
},
required: ['url']
}
}
]
}
})
server.setRequestHandler(CallToolRequestSchema, async (request) => {
const { arguments: args } = request.params
const validatedArgs = RequestPayloadSchema.parse(args)
if (request.params.name === 'fetch_html') {
const fetchResult = await Fetcher.html(validatedArgs)
return fetchResult
}
if (request.params.name === 'fetch_json') {
const fetchResult = await Fetcher.json(validatedArgs)
return fetchResult
}
if (request.params.name === 'fetch_txt') {
const fetchResult = await Fetcher.txt(validatedArgs)
return fetchResult
}
if (request.params.name === 'fetch_markdown') {
const fetchResult = await Fetcher.markdown(validatedArgs)
return fetchResult
}
throw new Error('Tool not found')
})
class FetchServer {
public server: Server
constructor() {
this.server = server
}
}
export default FetchServer

View File

@ -0,0 +1,655 @@
// port https://github.com/modelcontextprotocol/servers/blob/main/src/filesystem/index.ts
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema, ToolSchema } from '@modelcontextprotocol/sdk/types.js'
import { createTwoFilesPatch } from 'diff'
import fs from 'fs/promises'
import { minimatch } from 'minimatch'
import os from 'os'
import path from 'path'
import { z } from 'zod'
import { zodToJsonSchema } from 'zod-to-json-schema'
// Normalize all paths consistently
function normalizePath(p: string): string {
return path.normalize(p)
}
function expandHome(filepath: string): string {
if (filepath.startsWith('~/') || filepath === '~') {
return path.join(os.homedir(), filepath.slice(1))
}
return filepath
}
// Security utilities
async function validatePath(allowedDirectories: string[], requestedPath: string): Promise<string> {
const expandedPath = expandHome(requestedPath)
const absolute = path.isAbsolute(expandedPath)
? path.resolve(expandedPath)
: path.resolve(process.cwd(), expandedPath)
const normalizedRequested = normalizePath(absolute)
// Check if path is within allowed directories
const isAllowed = allowedDirectories.some((dir) => normalizedRequested.startsWith(dir))
if (!isAllowed) {
throw new Error(
`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`
)
}
// Handle symlinks by checking their real path
try {
const realPath = await fs.realpath(absolute)
const normalizedReal = normalizePath(realPath)
const isRealPathAllowed = allowedDirectories.some((dir) => normalizedReal.startsWith(dir))
if (!isRealPathAllowed) {
throw new Error('Access denied - symlink target outside allowed directories')
}
return realPath
} catch (error) {
// For new files that don't exist yet, verify parent directory
const parentDir = path.dirname(absolute)
try {
const realParentPath = await fs.realpath(parentDir)
const normalizedParent = normalizePath(realParentPath)
const isParentAllowed = allowedDirectories.some((dir) => normalizedParent.startsWith(dir))
if (!isParentAllowed) {
throw new Error('Access denied - parent directory outside allowed directories')
}
return absolute
} catch {
throw new Error(`Parent directory does not exist: ${parentDir}`)
}
}
}
// Schema definitions
const ReadFileArgsSchema = z.object({
path: z.string()
})
const ReadMultipleFilesArgsSchema = z.object({
paths: z.array(z.string())
})
const WriteFileArgsSchema = z.object({
path: z.string(),
content: z.string()
})
const EditOperation = z.object({
oldText: z.string().describe('Text to search for - must match exactly'),
newText: z.string().describe('Text to replace with')
})
const EditFileArgsSchema = z.object({
path: z.string(),
edits: z.array(EditOperation),
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
})
const CreateDirectoryArgsSchema = z.object({
path: z.string()
})
const ListDirectoryArgsSchema = z.object({
path: z.string()
})
const DirectoryTreeArgsSchema = z.object({
path: z.string()
})
const MoveFileArgsSchema = z.object({
source: z.string(),
destination: z.string()
})
const SearchFilesArgsSchema = z.object({
path: z.string(),
pattern: z.string(),
excludePatterns: z.array(z.string()).optional().default([])
})
const GetFileInfoArgsSchema = z.object({
path: z.string()
})
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const ToolInputSchema = ToolSchema.shape.inputSchema
type ToolInput = z.infer<typeof ToolInputSchema>
interface FileInfo {
size: number
created: Date
modified: Date
accessed: Date
isDirectory: boolean
isFile: boolean
permissions: string
}
// Tool implementations
async function getFileStats(filePath: string): Promise<FileInfo> {
const stats = await fs.stat(filePath)
return {
size: stats.size,
created: stats.birthtime,
modified: stats.mtime,
accessed: stats.atime,
isDirectory: stats.isDirectory(),
isFile: stats.isFile(),
permissions: stats.mode.toString(8).slice(-3)
}
}
async function searchFiles(
allowedDirectories: string[],
rootPath: string,
pattern: string,
excludePatterns: string[] = []
): Promise<string[]> {
const results: string[] = []
async function search(currentPath: string) {
const entries = await fs.readdir(currentPath, { withFileTypes: true })
for (const entry of entries) {
const fullPath = path.join(currentPath, entry.name)
try {
// Validate each path before processing
await validatePath(allowedDirectories, fullPath)
// Check if path matches any exclude pattern
const relativePath = path.relative(rootPath, fullPath)
const shouldExclude = excludePatterns.some((pattern) => {
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`
return minimatch(relativePath, globPattern, { dot: true })
})
if (shouldExclude) {
continue
}
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
results.push(fullPath)
}
if (entry.isDirectory()) {
await search(fullPath)
}
} catch (error) {
// Skip invalid paths during search
continue
}
}
}
await search(rootPath)
return results
}
// file editing and diffing utilities
function normalizeLineEndings(text: string): string {
return text.replace(/\r\n/g, '\n')
}
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
// Ensure consistent line endings for diff
const normalizedOriginal = normalizeLineEndings(originalContent)
const normalizedNew = normalizeLineEndings(newContent)
return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, 'original', 'modified')
}
async function applyFileEdits(
filePath: string,
edits: Array<{ oldText: string; newText: string }>,
dryRun = false
): Promise<string> {
// Read file content and normalize line endings
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'))
// Apply edits sequentially
let modifiedContent = content
for (const edit of edits) {
const normalizedOld = normalizeLineEndings(edit.oldText)
const normalizedNew = normalizeLineEndings(edit.newText)
// If exact match exists, use it
if (modifiedContent.includes(normalizedOld)) {
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew)
continue
}
// Otherwise, try line-by-line matching with flexibility for whitespace
const oldLines = normalizedOld.split('\n')
const contentLines = modifiedContent.split('\n')
let matchFound = false
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
const potentialMatch = contentLines.slice(i, i + oldLines.length)
// Compare lines with normalized whitespace
const isMatch = oldLines.every((oldLine, j) => {
const contentLine = potentialMatch[j]
return oldLine.trim() === contentLine.trim()
})
if (isMatch) {
// Preserve original indentation of first line
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || ''
const newLines = normalizedNew.split('\n').map((line, j) => {
if (j === 0) return originalIndent + line.trimStart()
// For subsequent lines, try to preserve relative indentation
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || ''
const newIndent = line.match(/^\s*/)?.[0] || ''
if (oldIndent && newIndent) {
const relativeIndent = newIndent.length - oldIndent.length
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart()
}
return line
})
contentLines.splice(i, oldLines.length, ...newLines)
modifiedContent = contentLines.join('\n')
matchFound = true
break
}
}
if (!matchFound) {
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`)
}
}
// Create unified diff
const diff = createUnifiedDiff(content, modifiedContent, filePath)
// Format diff with appropriate number of backticks
let numBackticks = 3
while (diff.includes('`'.repeat(numBackticks))) {
numBackticks++
}
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`
if (!dryRun) {
await fs.writeFile(filePath, modifiedContent, 'utf-8')
}
return formattedDiff
}
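// Example (not part of this commit): a dry-run call to applyFileEdits, assuming
// config.json lives inside an allowed directory. With dryRun=true the function returns
// the fenced git-style diff without writing the file.
// const diff = await applyFileEdits('/allowed/config.json', [{ oldText: 'debug: false', newText: 'debug: true' }], true)
// console.log(diff)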
class FileSystemServer {
public server: Server
private allowedDirectories: string[]
constructor(allowedDirs: string[]) {
if (!Array.isArray(allowedDirs) || allowedDirs.length === 0) {
throw new Error('No allowed directories provided, please specify at least one directory in args')
}
this.allowedDirectories = allowedDirs.map((dir) => normalizePath(path.resolve(expandHome(dir))))
// Validate that all directories exist and are accessible
this.validateDirs().catch((error) => {
console.error('Error validating allowed directories:', error)
throw new Error(`Error validating allowed directories: ${error}`)
})
this.server = new Server(
{
name: 'secure-filesystem-server',
version: '0.2.0'
},
{
capabilities: {
tools: {}
}
}
)
this.initialize()
}
async validateDirs() {
// Validate that all directories exist and are accessible
await Promise.all(
this.allowedDirectories.map(async (dir) => {
try {
const stats = await fs.stat(expandHome(dir))
if (!stats.isDirectory()) {
console.error(`Error: ${dir} is not a directory`)
throw new Error(`Error: ${dir} is not a directory`)
}
} catch (error: any) {
console.error(`Error accessing directory ${dir}:`, error)
throw new Error(`Error accessing directory ${dir}: ${error}`)
}
})
)
}
initialize() {
// Tool handlers
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
return {
tools: [
{
name: 'read_file',
description:
'Read the complete contents of a file from the file system. ' +
'Handles various text encodings and provides detailed error messages ' +
'if the file cannot be read. Use this tool when you need to examine ' +
'the contents of a single file. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput
},
{
name: 'read_multiple_files',
description:
'Read the contents of multiple files simultaneously. This is more ' +
'efficient than reading files one by one when you need to analyze ' +
"or compare multiple files. Each file's content is returned with its " +
"path as a reference. Failed reads for individual files won't stop " +
'the entire operation. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput
},
{
name: 'write_file',
description:
'Create a new file or completely overwrite an existing file with new content. ' +
'Use with caution as it will overwrite existing files without warning. ' +
'Handles text content with proper encoding. Only works within allowed directories.',
inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput
},
{
name: 'edit_file',
description:
'Make line-based edits to a text file. Each edit replaces exact line sequences ' +
'with new content. Returns a git-style diff showing the changes made. ' +
'Only works within allowed directories.',
inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput
},
{
name: 'create_directory',
description:
'Create a new directory or ensure a directory exists. Can create multiple ' +
'nested directories in one operation. If the directory already exists, ' +
'this operation will succeed silently. Perfect for setting up directory ' +
'structures for projects or ensuring required paths exist. Only works within allowed directories.',
inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput
},
{
name: 'list_directory',
description:
'Get a detailed listing of all files and directories in a specified path. ' +
'Results clearly distinguish between files and directories with [FILE] and [DIR] ' +
'prefixes. This tool is essential for understanding directory structure and ' +
'finding specific files within a directory. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput
},
{
name: 'directory_tree',
description:
'Get a recursive tree view of files and directories as a JSON structure. ' +
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
'Files have no children array, while directories always have a children array (which may be empty). ' +
'The output is formatted with 2-space indentation for readability. Only works within allowed directories.',
inputSchema: zodToJsonSchema(DirectoryTreeArgsSchema) as ToolInput
},
{
name: 'move_file',
description:
'Move or rename files and directories. Can move files between directories ' +
'and rename them in a single operation. If the destination exists, the ' +
'operation will fail. Works across different directories and can be used ' +
'for simple renaming within the same directory. Both source and destination must be within allowed directories.',
inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput
},
{
name: 'search_files',
description:
'Recursively search for files and directories matching a pattern. ' +
'Searches through all subdirectories from the starting path. The search ' +
'is case-insensitive and matches partial names. Returns full paths to all ' +
"matching items. Great for finding files when you don't know their exact location. " +
'Only searches within allowed directories.',
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput
},
{
name: 'get_file_info',
description:
'Retrieve detailed metadata about a file or directory. Returns comprehensive ' +
'information including size, creation time, last modified time, permissions, ' +
'and type. This tool is perfect for understanding file characteristics ' +
'without reading the actual content. Only works within allowed directories.',
inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput
},
{
name: 'list_allowed_directories',
description:
'Returns the list of directories that this server is allowed to access. ' +
'Use this to understand which directories are available before trying to access files.',
inputSchema: {
type: 'object',
properties: {},
required: []
}
}
]
}
})
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
const { name, arguments: args } = request.params
switch (name) {
case 'read_file': {
const parsed = ReadFileArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for read_file: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
const content = await fs.readFile(validPath, 'utf-8')
return {
content: [{ type: 'text', text: content }]
}
}
case 'read_multiple_files': {
const parsed = ReadMultipleFilesArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`)
}
const results = await Promise.all(
parsed.data.paths.map(async (filePath: string) => {
try {
const validPath = await validatePath(this.allowedDirectories, filePath)
const content = await fs.readFile(validPath, 'utf-8')
return `${filePath}:\n${content}\n`
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
return `${filePath}: Error - ${errorMessage}`
}
})
)
return {
content: [{ type: 'text', text: results.join('\n---\n') }]
}
}
case 'write_file': {
const parsed = WriteFileArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for write_file: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
await fs.writeFile(validPath, parsed.data.content, 'utf-8')
return {
content: [{ type: 'text', text: `Successfully wrote to ${parsed.data.path}` }]
}
}
case 'edit_file': {
const parsed = EditFileArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for edit_file: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun)
return {
content: [{ type: 'text', text: result }]
}
}
case 'create_directory': {
const parsed = CreateDirectoryArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for create_directory: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
await fs.mkdir(validPath, { recursive: true })
return {
content: [{ type: 'text', text: `Successfully created directory ${parsed.data.path}` }]
}
}
case 'list_directory': {
const parsed = ListDirectoryArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for list_directory: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
const entries = await fs.readdir(validPath, { withFileTypes: true })
const formatted = entries
.map((entry) => `${entry.isDirectory() ? '[DIR]' : '[FILE]'} ${entry.name}`)
.join('\n')
return {
content: [{ type: 'text', text: formatted }]
}
}
case 'directory_tree': {
const parsed = DirectoryTreeArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`)
}
interface TreeEntry {
name: string
type: 'file' | 'directory'
children?: TreeEntry[]
}
async function buildTree(allowedDirectories: string[], currentPath: string): Promise<TreeEntry[]> {
const validPath = await validatePath(allowedDirectories, currentPath)
const entries = await fs.readdir(validPath, { withFileTypes: true })
const result: TreeEntry[] = []
for (const entry of entries) {
const entryData: TreeEntry = {
name: entry.name,
type: entry.isDirectory() ? 'directory' : 'file'
}
if (entry.isDirectory()) {
const subPath = path.join(currentPath, entry.name)
entryData.children = await buildTree(allowedDirectories, subPath)
}
result.push(entryData)
}
return result
}
const treeData = await buildTree(this.allowedDirectories, parsed.data.path)
return {
content: [
{
type: 'text',
text: JSON.stringify(treeData, null, 2)
}
]
}
}
case 'move_file': {
const parsed = MoveFileArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for move_file: ${parsed.error}`)
}
const validSourcePath = await validatePath(this.allowedDirectories, parsed.data.source)
const validDestPath = await validatePath(this.allowedDirectories, parsed.data.destination)
await fs.rename(validSourcePath, validDestPath)
return {
content: [
{ type: 'text', text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }
]
}
}
case 'search_files': {
const parsed = SearchFilesArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for search_files: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
const results = await searchFiles(
this.allowedDirectories,
validPath,
parsed.data.pattern,
parsed.data.excludePatterns
)
return {
content: [{ type: 'text', text: results.length > 0 ? results.join('\n') : 'No matches found' }]
}
}
case 'get_file_info': {
const parsed = GetFileInfoArgsSchema.safeParse(args)
if (!parsed.success) {
throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`)
}
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
const info = await getFileStats(validPath)
return {
content: [
{
type: 'text',
text: Object.entries(info)
.map(([key, value]) => `${key}: ${value}`)
.join('\n')
}
]
}
}
case 'list_allowed_directories': {
return {
content: [
{
type: 'text',
text: `Allowed directories:\n${this.allowedDirectories.join('\n')}`
}
]
}
}
default:
throw new Error(`Unknown tool: ${name}`)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
return {
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
isError: true
}
}
})
}
}
export default FileSystemServer
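A minimal wiring sketch, not part of this diff: the constructor argument (a list of allowed directories) is inferred from the validation code above, the import paths are illustrative, and the stdio transport stands in for however the app actually hosts the server in-process.

import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'

import FileSystemServer from './filesystem'

async function startFileSystemServer() {
  // Directories the server may touch; everything else is rejected by validatePath
  const fsServer = new FileSystemServer(['/Users/me/Documents'])
  // Registers the tool handlers shown above (skip if the constructor already does this)
  fsServer.initialize()
  // Connect the underlying MCP Server to a transport so clients can call the tools
  await fsServer.server.connect(new StdioServerTransport())
}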

View File

@ -0,0 +1,700 @@
import { getConfigDir } from '@main/utils/file'
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } from '@modelcontextprotocol/sdk/types.js'
import { Mutex } from 'async-mutex' // Mutex for serializing file writes
import { promises as fs } from 'fs'
import path from 'path'
// Define memory file path
const defaultMemoryPath = path.join(getConfigDir(), 'memory.json')
// Interfaces remain the same
interface Entity {
name: string
entityType: string
observations: string[]
}
interface Relation {
from: string
to: string
relationType: string
}
// Structure for storing the graph in memory and in the file
interface KnowledgeGraph {
entities: Entity[]
relations: Relation[]
}
// The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
class KnowledgeGraphManager {
private memoryPath: string
private entities: Map<string, Entity> // Use Map for efficient entity lookup
private relations: Set<string> // Store stringified relations for easy Set operations
private fileMutex: Mutex // Mutex for file writing
private constructor(memoryPath: string) {
this.memoryPath = memoryPath
this.entities = new Map<string, Entity>()
this.relations = new Set<string>()
this.fileMutex = new Mutex()
}
// Static async factory method for initialization
public static async create(memoryPath: string): Promise<KnowledgeGraphManager> {
const manager = new KnowledgeGraphManager(memoryPath)
await manager._ensureMemoryPathExists()
await manager._loadGraphFromDisk()
return manager
}
private async _ensureMemoryPathExists(): Promise<void> {
try {
const directory = path.dirname(this.memoryPath)
await fs.mkdir(directory, { recursive: true })
try {
await fs.access(this.memoryPath)
} catch (error) {
// File doesn't exist, create an empty file with initial structure
await fs.writeFile(this.memoryPath, JSON.stringify({ entities: [], relations: [] }, null, 2))
}
} catch (error) {
console.error('Failed to ensure memory path exists:', error)
// Propagate the error or handle it more gracefully depending on requirements
throw new McpError(
ErrorCode.InternalError,
`Failed to ensure memory path: ${error instanceof Error ? error.message : String(error)}`
)
}
}
// Load graph from disk into memory (called once during initialization)
private async _loadGraphFromDisk(): Promise<void> {
try {
const data = await fs.readFile(this.memoryPath, 'utf-8')
// Handle empty file case
if (data.trim() === '') {
this.entities = new Map()
this.relations = new Set()
// Optionally write the initial empty structure back
await this._persistGraph()
return
}
const graph: KnowledgeGraph = JSON.parse(data)
this.entities.clear()
this.relations.clear()
graph.entities.forEach((entity) => this.entities.set(entity.name, entity))
graph.relations.forEach((relation) => this.relations.add(this._serializeRelation(relation)))
} catch (error) {
if (error instanceof Error && 'code' in error && (error as any).code === 'ENOENT') {
// File doesn't exist (should have been created by _ensureMemoryPathExists, but handle defensively)
this.entities = new Map()
this.relations = new Set()
await this._persistGraph() // Create the file with empty structure
} else if (error instanceof SyntaxError) {
console.error('Failed to parse memory.json, initializing with empty graph:', error)
// If JSON is invalid, start fresh and overwrite the corrupted file
this.entities = new Map()
this.relations = new Set()
await this._persistGraph()
} else {
console.error('Failed to load knowledge graph from disk:', error)
throw new McpError(
ErrorCode.InternalError,
`Failed to load graph: ${error instanceof Error ? error.message : String(error)}`
)
}
}
}
// Persist the current in-memory graph to disk using a mutex
private async _persistGraph(): Promise<void> {
const release = await this.fileMutex.acquire()
try {
const graphData: KnowledgeGraph = {
entities: Array.from(this.entities.values()),
relations: Array.from(this.relations).map((rStr) => this._deserializeRelation(rStr))
}
await fs.writeFile(this.memoryPath, JSON.stringify(graphData, null, 2))
} catch (error) {
console.error('Failed to save knowledge graph:', error)
// Decide how to handle write errors - potentially retry or notify
throw new McpError(
ErrorCode.InternalError,
`Failed to save graph: ${error instanceof Error ? error.message : String(error)}`
)
} finally {
release()
}
}
// Helper to consistently serialize relations for Set storage
private _serializeRelation(relation: Relation): string {
// Simple serialization, ensure order doesn't matter if properties are consistent
return JSON.stringify({ from: relation.from, to: relation.to, relationType: relation.relationType })
}
// Helper to deserialize relations from Set storage
private _deserializeRelation(relationStr: string): Relation {
return JSON.parse(relationStr) as Relation
}
async createEntities(entities: Entity[]): Promise<Entity[]> {
const newEntities: Entity[] = []
entities.forEach((entity) => {
if (!this.entities.has(entity.name)) {
// Ensure observations is always an array
const newEntity = { ...entity, observations: Array.isArray(entity.observations) ? entity.observations : [] }
this.entities.set(entity.name, newEntity)
newEntities.push(newEntity)
}
})
if (newEntities.length > 0) {
await this._persistGraph()
}
return newEntities
}
async createRelations(relations: Relation[]): Promise<Relation[]> {
const newRelations: Relation[] = []
relations.forEach((relation) => {
// Ensure related entities exist before creating a relation
if (!this.entities.has(relation.from) || !this.entities.has(relation.to)) {
console.warn(`Skipping relation creation: Entity not found for relation ${relation.from} -> ${relation.to}`)
return // Skip this relation
}
const relationStr = this._serializeRelation(relation)
if (!this.relations.has(relationStr)) {
this.relations.add(relationStr)
newRelations.push(relation)
}
})
if (newRelations.length > 0) {
await this._persistGraph()
}
return newRelations
}
async addObservations(
observations: { entityName: string; contents: string[] }[]
): Promise<{ entityName: string; addedObservations: string[] }[]> {
const results: { entityName: string; addedObservations: string[] }[] = []
let changed = false
observations.forEach((o) => {
const entity = this.entities.get(o.entityName)
if (!entity) {
// Option 1: Throw error
throw new McpError(ErrorCode.InvalidParams, `Entity with name ${o.entityName} not found`)
// Option 2: Skip and warn
// console.warn(`Entity with name ${o.entityName} not found when adding observations. Skipping.`);
// return;
}
// Ensure observations array exists
if (!Array.isArray(entity.observations)) {
entity.observations = []
}
const newObservations = o.contents.filter((content) => !entity.observations.includes(content))
if (newObservations.length > 0) {
entity.observations.push(...newObservations)
results.push({ entityName: o.entityName, addedObservations: newObservations })
changed = true
} else {
// Still include in results even if nothing was added, to confirm processing
results.push({ entityName: o.entityName, addedObservations: [] })
}
})
if (changed) {
await this._persistGraph()
}
return results
}
async deleteEntities(entityNames: string[]): Promise<void> {
let changed = false
const namesToDelete = new Set(entityNames)
// Delete entities
namesToDelete.forEach((name) => {
if (this.entities.delete(name)) {
changed = true
}
})
// Delete relations involving deleted entities
const relationsToDelete = new Set<string>()
this.relations.forEach((relStr) => {
const rel = this._deserializeRelation(relStr)
if (namesToDelete.has(rel.from) || namesToDelete.has(rel.to)) {
relationsToDelete.add(relStr)
}
})
relationsToDelete.forEach((relStr) => {
if (this.relations.delete(relStr)) {
changed = true
}
})
if (changed) {
await this._persistGraph()
}
}
async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> {
let changed = false
deletions.forEach((d) => {
const entity = this.entities.get(d.entityName)
if (entity && Array.isArray(entity.observations)) {
const initialLength = entity.observations.length
const observationsToDelete = new Set(d.observations)
entity.observations = entity.observations.filter((o) => !observationsToDelete.has(o))
if (entity.observations.length !== initialLength) {
changed = true
}
}
})
if (changed) {
await this._persistGraph()
}
}
async deleteRelations(relations: Relation[]): Promise<void> {
let changed = false
relations.forEach((rel) => {
const relStr = this._serializeRelation(rel)
if (this.relations.delete(relStr)) {
changed = true
}
})
if (changed) {
await this._persistGraph()
}
}
// Read the current state from memory
async readGraph(): Promise<KnowledgeGraph> {
// Return a deep copy to prevent external modification of the internal state
return JSON.parse(
JSON.stringify({
entities: Array.from(this.entities.values()),
relations: Array.from(this.relations).map((rStr) => this._deserializeRelation(rStr))
})
)
}
// Search operates on the in-memory graph
async searchNodes(query: string): Promise<KnowledgeGraph> {
const lowerCaseQuery = query.toLowerCase()
const filteredEntities = Array.from(this.entities.values()).filter(
(e) =>
e.name.toLowerCase().includes(lowerCaseQuery) ||
e.entityType.toLowerCase().includes(lowerCaseQuery) ||
(Array.isArray(e.observations) && e.observations.some((o) => o.toLowerCase().includes(lowerCaseQuery)))
)
const filteredEntityNames = new Set(filteredEntities.map((e) => e.name))
const filteredRelations = Array.from(this.relations)
.map((rStr) => this._deserializeRelation(rStr))
.filter((r) => filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to))
return {
entities: filteredEntities,
relations: filteredRelations
}
}
// Open operates on the in-memory graph
async openNodes(names: string[]): Promise<KnowledgeGraph> {
const nameSet = new Set(names)
const filteredEntities = Array.from(this.entities.values()).filter((e) => nameSet.has(e.name))
const filteredEntityNames = new Set(filteredEntities.map((e) => e.name))
const filteredRelations = Array.from(this.relations)
.map((rStr) => this._deserializeRelation(rStr))
.filter((r) => filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to))
return {
entities: filteredEntities,
relations: filteredRelations
}
}
}
class MemoryServer {
public server: Server
// Hold the manager instance, initialized asynchronously
private knowledgeGraphManager: KnowledgeGraphManager | null = null
private initializationPromise: Promise<void> // To track initialization
constructor(envPath: string = '') {
const memoryPath = envPath
? path.isAbsolute(envPath)
? envPath
: path.resolve(envPath) // Use path.resolve for relative paths based on CWD
: defaultMemoryPath
this.server = new Server(
{
name: 'memory-server',
version: '1.1.0' // Incremented version for changes
},
{
capabilities: {
tools: {}
}
}
)
// Start initialization, but don't block constructor
this.initializationPromise = this._initializeManager(memoryPath)
this.setupRequestHandlers() // Setup handlers immediately
}
// Private async method to handle manager initialization
private async _initializeManager(memoryPath: string): Promise<void> {
try {
this.knowledgeGraphManager = await KnowledgeGraphManager.create(memoryPath)
console.log('KnowledgeGraphManager initialized successfully.')
} catch (error) {
console.error('Failed to initialize KnowledgeGraphManager:', error)
// Server might be unusable, consider how to handle this state
// Maybe set a flag and return errors for all tool calls?
this.knowledgeGraphManager = null // Ensure it's null if init fails
}
}
// Ensures the manager is initialized before handling tool calls
private async _getManager(): Promise<KnowledgeGraphManager> {
await this.initializationPromise // Wait for initialization to complete
if (!this.knowledgeGraphManager) {
throw new McpError(ErrorCode.InternalError, 'Memory server failed to initialize. Cannot process requests.')
}
return this.knowledgeGraphManager
}
// Setup handlers (can be called from constructor)
setupRequestHandlers() {
// ListTools remains largely the same, descriptions might be updated if needed
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
// Ensure manager is ready before listing tools that depend on it
// Although ListTools itself doesn't *call* the manager, it implies the
// manager is ready to handle calls for those tools.
try {
await this._getManager() // Wait for initialization before confirming tools are available
} catch (error) {
// If manager failed to init, maybe return an empty tool list or throw?
console.error('Cannot list tools, manager initialization failed:', error)
return { tools: [] } // Return empty list if server is not ready
}
return {
tools: [
{
name: 'create_entities',
description: 'Create multiple new entities in the knowledge graph. Skips existing entities.',
inputSchema: {
type: 'object',
properties: {
entities: {
type: 'array',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'The name of the entity' },
entityType: { type: 'string', description: 'The type of the entity' },
observations: {
type: 'array',
items: { type: 'string' },
description: 'An array of observation contents associated with the entity',
default: [] // Add default empty array
}
},
required: ['name', 'entityType'] // Observations are optional now on creation
}
}
},
required: ['entities']
}
},
{
name: 'create_relations',
description:
'Create multiple new relations between EXISTING entities. Skips existing relations or relations with non-existent entities.',
inputSchema: {
type: 'object',
properties: {
relations: {
type: 'array',
items: {
type: 'object',
properties: {
from: { type: 'string', description: 'The name of the entity where the relation starts' },
to: { type: 'string', description: 'The name of the entity where the relation ends' },
relationType: { type: 'string', description: 'The type of the relation' }
},
required: ['from', 'to', 'relationType']
}
}
},
required: ['relations']
}
},
{
name: 'add_observations',
description: 'Add new observations to existing entities. Skips duplicate observations.',
inputSchema: {
type: 'object',
properties: {
observations: {
type: 'array',
items: {
type: 'object',
properties: {
entityName: { type: 'string', description: 'The name of the entity to add the observations to' },
contents: {
type: 'array',
items: { type: 'string' },
description: 'An array of observation contents to add'
}
},
required: ['entityName', 'contents']
}
}
},
required: ['observations']
}
},
{
name: 'delete_entities',
description: 'Delete multiple entities and their associated relations.',
inputSchema: {
type: 'object',
properties: {
entityNames: {
type: 'array',
items: { type: 'string' },
description: 'An array of entity names to delete'
}
},
required: ['entityNames']
}
},
{
name: 'delete_observations',
description: 'Delete specific observations from entities.',
inputSchema: {
type: 'object',
properties: {
deletions: {
type: 'array',
items: {
type: 'object',
properties: {
entityName: { type: 'string', description: 'The name of the entity containing the observations' },
observations: {
type: 'array',
items: { type: 'string' },
description: 'An array of observations to delete'
}
},
required: ['entityName', 'observations']
}
}
},
required: ['deletions']
}
},
{
name: 'delete_relations',
description: 'Delete multiple specific relations.',
inputSchema: {
type: 'object',
properties: {
relations: {
type: 'array',
items: {
type: 'object',
properties: {
from: { type: 'string', description: 'The name of the entity where the relation starts' },
to: { type: 'string', description: 'The name of the entity where the relation ends' },
relationType: { type: 'string', description: 'The type of the relation' }
},
required: ['from', 'to', 'relationType']
},
description: 'An array of relations to delete'
}
},
required: ['relations']
}
},
{
name: 'read_graph',
description: 'Read the entire knowledge graph from memory.',
inputSchema: {
type: 'object',
properties: {}
}
},
{
name: 'search_nodes',
description: 'Search nodes (entities and relations) in memory based on a query.',
inputSchema: {
type: 'object',
properties: {
query: {
type: 'string',
description: 'The search query to match against entity names, types, and observation content'
}
},
required: ['query']
}
},
{
name: 'open_nodes',
description: 'Retrieve specific entities and their connecting relations from memory by name.',
inputSchema: {
type: 'object',
properties: {
names: {
type: 'array',
items: { type: 'string' },
description: 'An array of entity names to retrieve'
}
},
required: ['names']
}
}
]
}
})
// CallTool handler needs to await the manager and the async methods
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
const manager = await this._getManager() // Ensure manager is ready
const { name, arguments: args } = request.params
if (!args) {
// Use McpError for standard errors
throw new McpError(ErrorCode.InvalidParams, `No arguments provided for tool: ${name}`)
}
try {
switch (name) {
case 'create_entities':
// Validate args structure if necessary, though SDK might do basic validation
if (!args.entities || !Array.isArray(args.entities)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'entities' array is required.`
)
}
return {
content: [
{ type: 'text', text: JSON.stringify(await manager.createEntities(args.entities as Entity[]), null, 2) }
]
}
case 'create_relations':
if (!args.relations || !Array.isArray(args.relations)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'relations' array is required.`
)
}
return {
content: [
{
type: 'text',
text: JSON.stringify(await manager.createRelations(args.relations as Relation[]), null, 2)
}
]
}
case 'add_observations':
if (!args.observations || !Array.isArray(args.observations)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'observations' array is required.`
)
}
return {
content: [
{
type: 'text',
text: JSON.stringify(
await manager.addObservations(args.observations as { entityName: string; contents: string[] }[]),
null,
2
)
}
]
}
case 'delete_entities':
if (!args.entityNames || !Array.isArray(args.entityNames)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'entityNames' array is required.`
)
}
await manager.deleteEntities(args.entityNames as string[])
return { content: [{ type: 'text', text: 'Entities deleted successfully' }] }
case 'delete_observations':
if (!args.deletions || !Array.isArray(args.deletions)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'deletions' array is required.`
)
}
await manager.deleteObservations(args.deletions as { entityName: string; observations: string[] }[])
return { content: [{ type: 'text', text: 'Observations deleted successfully' }] }
case 'delete_relations':
if (!args.relations || !Array.isArray(args.relations)) {
throw new McpError(
ErrorCode.InvalidParams,
`Invalid arguments for ${name}: 'relations' array is required.`
)
}
await manager.deleteRelations(args.relations as Relation[])
return { content: [{ type: 'text', text: 'Relations deleted successfully' }] }
case 'read_graph':
// No arguments expected or needed for read_graph based on original schema
return {
content: [{ type: 'text', text: JSON.stringify(await manager.readGraph(), null, 2) }]
}
case 'search_nodes':
if (typeof args.query !== 'string') {
throw new McpError(ErrorCode.InvalidParams, `Invalid arguments for ${name}: 'query' string is required.`)
}
return {
content: [
{ type: 'text', text: JSON.stringify(await manager.searchNodes(args.query as string), null, 2) }
]
}
case 'open_nodes':
if (!args.names || !Array.isArray(args.names)) {
throw new McpError(ErrorCode.InvalidParams, `Invalid arguments for ${name}: 'names' array is required.`)
}
return {
content: [
{ type: 'text', text: JSON.stringify(await manager.openNodes(args.names as string[]), null, 2) }
]
}
default:
throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`)
}
} catch (error) {
// Catch errors from manager methods (like entity not found) or other issues
if (error instanceof McpError) {
throw error // Re-throw McpErrors directly
}
console.error(`Error executing tool ${name}:`, error)
// Throw a generic internal error for unexpected issues
throw new McpError(
ErrorCode.InternalError,
`Error executing tool ${name}: ${error instanceof Error ? error.message : String(error)}`
)
}
})
}
}
export default MemoryServer
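An illustrative payload for the create_entities tool, matching the input schema above; the entity data is invented.

const exampleCreateEntitiesRequest = {
  name: 'create_entities',
  arguments: {
    entities: [
      {
        name: 'Ada Lovelace',
        entityType: 'person',
        observations: ['Wrote notes on the Analytical Engine']
      }
    ]
  }
}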

View File

@ -0,0 +1,289 @@
// Sequential Thinking MCP Server
// port https://github.com/modelcontextprotocol/servers/blob/main/src/sequentialthinking/index.ts
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema, Tool } from '@modelcontextprotocol/sdk/types.js'
// Fixed chalk import for ESM
import chalk from 'chalk'
interface ThoughtData {
thought: string
thoughtNumber: number
totalThoughts: number
isRevision?: boolean
revisesThought?: number
branchFromThought?: number
branchId?: string
needsMoreThoughts?: boolean
nextThoughtNeeded: boolean
}
class SequentialThinkingServer {
private thoughtHistory: ThoughtData[] = []
private branches: Record<string, ThoughtData[]> = {}
private validateThoughtData(input: unknown): ThoughtData {
const data = input as Record<string, unknown>
if (!data.thought || typeof data.thought !== 'string') {
throw new Error('Invalid thought: must be a string')
}
if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') {
throw new Error('Invalid thoughtNumber: must be a number')
}
if (!data.totalThoughts || typeof data.totalThoughts !== 'number') {
throw new Error('Invalid totalThoughts: must be a number')
}
if (typeof data.nextThoughtNeeded !== 'boolean') {
throw new Error('Invalid nextThoughtNeeded: must be a boolean')
}
return {
thought: data.thought,
thoughtNumber: data.thoughtNumber,
totalThoughts: data.totalThoughts,
nextThoughtNeeded: data.nextThoughtNeeded,
isRevision: data.isRevision as boolean | undefined,
revisesThought: data.revisesThought as number | undefined,
branchFromThought: data.branchFromThought as number | undefined,
branchId: data.branchId as string | undefined,
needsMoreThoughts: data.needsMoreThoughts as boolean | undefined
}
}
private formatThought(thoughtData: ThoughtData): string {
const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } =
thoughtData
let prefix = ''
let context = ''
if (isRevision) {
prefix = chalk.yellow('🔄 Revision')
context = ` (revising thought ${revisesThought})`
} else if (branchFromThought) {
prefix = chalk.green('🌿 Branch')
context = ` (from thought ${branchFromThought}, ID: ${branchId})`
} else {
prefix = chalk.blue('💭 Thought')
context = ''
}
const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`
const border = '─'.repeat(Math.max(header.length, thought.length) + 4)
return `
${border}
${header}
${border}
${thought.padEnd(border.length - 2)}
${border}`
}
public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } {
try {
const validatedInput = this.validateThoughtData(input)
if (validatedInput.thoughtNumber > validatedInput.totalThoughts) {
validatedInput.totalThoughts = validatedInput.thoughtNumber
}
this.thoughtHistory.push(validatedInput)
if (validatedInput.branchFromThought && validatedInput.branchId) {
if (!this.branches[validatedInput.branchId]) {
this.branches[validatedInput.branchId] = []
}
this.branches[validatedInput.branchId].push(validatedInput)
}
const formattedThought = this.formatThought(validatedInput)
console.error(formattedThought)
return {
content: [
{
type: 'text',
text: JSON.stringify(
{
thoughtNumber: validatedInput.thoughtNumber,
totalThoughts: validatedInput.totalThoughts,
nextThoughtNeeded: validatedInput.nextThoughtNeeded,
branches: Object.keys(this.branches),
thoughtHistoryLength: this.thoughtHistory.length
},
null,
2
)
}
]
}
} catch (error) {
return {
content: [
{
type: 'text',
text: JSON.stringify(
{
error: error instanceof Error ? error.message : String(error),
status: 'failed'
},
null,
2
)
}
],
isError: true
}
}
}
}
const SEQUENTIAL_THINKING_TOOL: Tool = {
name: 'sequentialthinking',
description: `A detailed tool for dynamic and reflective problem-solving through thoughts.
This tool helps analyze problems through a flexible thinking process that can adapt and evolve.
Each thought can build on, question, or revise previous insights as understanding deepens.
When to use this tool:
- Breaking down complex problems into steps
- Planning and design with room for revision
- Analysis that might need course correction
- Problems where the full scope might not be clear initially
- Problems that require a multi-step solution
- Tasks that need to maintain context over multiple steps
- Situations where irrelevant information needs to be filtered out
Key features:
- You can adjust total_thoughts up or down as you progress
- You can question or revise previous thoughts
- You can add more thoughts even after reaching what seemed like the end
- You can express uncertainty and explore alternative approaches
- Not every thought needs to build linearly - you can branch or backtrack
- Generates a solution hypothesis
- Verifies the hypothesis based on the Chain of Thought steps
- Repeats the process until satisfied
- Provides a correct answer
Parameters explained:
- thought: Your current thinking step, which can include:
* Regular analytical steps
* Revisions of previous thoughts
* Questions about previous decisions
* Realizations about needing more analysis
* Changes in approach
* Hypothesis generation
* Hypothesis verification
- nextThoughtNeeded: True if you need more thinking, even if at what seemed like the end
- thoughtNumber: Current number in sequence (can go beyond initial total if needed)
- totalThoughts: Current estimate of thoughts needed (can be adjusted up/down)
- isRevision: A boolean indicating if this thought revises previous thinking
- revisesThought: If isRevision is true, which thought number is being reconsidered
- branchFromThought: If branching, which thought number is the branching point
- branchId: Identifier for the current branch (if any)
- needsMoreThoughts: If reaching end but realizing more thoughts needed
You should:
1. Start with an initial estimate of needed thoughts, but be ready to adjust
2. Feel free to question or revise previous thoughts
3. Don't hesitate to add more thoughts if needed, even at the "end"
4. Express uncertainty when present
5. Mark thoughts that revise previous thinking or branch into new paths
6. Ignore information that is irrelevant to the current step
7. Generate a solution hypothesis when appropriate
8. Verify the hypothesis based on the Chain of Thought steps
9. Repeat the process until satisfied with the solution
10. Provide a single, ideally correct answer as the final output
11. Only set nextThoughtNeeded to false when truly done and a satisfactory answer is reached`,
inputSchema: {
type: 'object',
properties: {
thought: {
type: 'string',
description: 'Your current thinking step'
},
nextThoughtNeeded: {
type: 'boolean',
description: 'Whether another thought step is needed'
},
thoughtNumber: {
type: 'integer',
description: 'Current thought number',
minimum: 1
},
totalThoughts: {
type: 'integer',
description: 'Estimated total thoughts needed',
minimum: 1
},
isRevision: {
type: 'boolean',
description: 'Whether this revises previous thinking'
},
revisesThought: {
type: 'integer',
description: 'Which thought is being reconsidered',
minimum: 1
},
branchFromThought: {
type: 'integer',
description: 'Branching point thought number',
minimum: 1
},
branchId: {
type: 'string',
description: 'Branch identifier'
},
needsMoreThoughts: {
type: 'boolean',
description: 'If more thoughts are needed'
}
},
required: ['thought', 'nextThoughtNeeded', 'thoughtNumber', 'totalThoughts']
}
}
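An illustrative arguments object for the sequentialthinking tool, matching the schema above; the thought text and numbers are invented.

const exampleThoughtArguments = {
  thought: 'The failure only occurs when the cache is cold, so initialization order is the likely culprit.',
  thoughtNumber: 2,
  totalThoughts: 4,
  nextThoughtNeeded: true,
  isRevision: true,
  revisesThought: 1
}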
class ThinkingServer {
public server: Server
private thinkingServer: SequentialThinkingServer
constructor() {
this.thinkingServer = new SequentialThinkingServer()
this.server = new Server(
{
name: 'sequential-thinking-server',
version: '0.2.0'
},
{
capabilities: {
tools: {}
}
}
)
this.initialize()
}
initialize() {
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [SEQUENTIAL_THINKING_TOOL]
}))
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
if (request.params.name === 'sequentialthinking') {
return this.thinkingServer.processThought(request.params.arguments)
}
return {
content: [
{
type: 'text',
text: `Unknown tool: ${request.params.name}`
}
],
isError: true
}
})
}
}
export default ThinkingServer

View File

@ -3,14 +3,60 @@ import { KnowledgeBaseParams } from '@types'
export default abstract class BaseReranker {
protected base: KnowledgeBaseParams
constructor(base: KnowledgeBaseParams) {
if (!base.rerankModel) {
throw new Error('Rerank model is required')
}
this.base = base
}
abstract rerank(query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]>
/**
* Get Rerank Request Url
*/
protected getRerankUrl() {
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
? this.base.rerankBaseURL.slice(0, -1)
: this.base.rerankBaseURL
// The URL must include /v1, otherwise the request returns 404
if (baseURL && !baseURL.endsWith('/v1')) {
baseURL = `${baseURL}/v1`
}
return `${baseURL}/rerank`
}
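// Illustrative examples of the normalization above (hostnames are placeholders):
//   'https://rerank.example.com/'   -> 'https://rerank.example.com/v1/rerank'
//   'https://rerank.example.com/v1' -> 'https://rerank.example.com/v1/rerank'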
/**
* Get Rerank Result
* @param searchResults
* @param rerankResults
* @protected
*/
protected getRerankResult(
searchResults: ExtractChunkData[],
rerankResults: Array<{
index: number
relevance_score: number
}>
) {
const resultMap = new Map(rerankResults.map((result) => [result.index, result.relevance_score || 0]))
return searchResults
.map((doc: ExtractChunkData, index: number) => {
const score = resultMap.get(index)
if (score === undefined) return undefined
return {
...doc,
score
}
})
.filter((doc): doc is ExtractChunkData => doc !== undefined)
.sort((a, b) => b.score - a.score)
}
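// Sketch with made-up data: rerankResults of [{ index: 1, relevance_score: 0.9 }, { index: 0, relevance_score: 0.2 }]
// attaches 0.9 to the second search result and 0.2 to the first, drops any chunk whose index is absent,
// and returns the remaining chunks sorted by score in descending order.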
public defaultHeaders() {
return {
Authorization: `Bearer ${this.base.rerankApiKey}`,
@ -18,7 +64,7 @@ export default abstract class BaseReranker {
}
}
public formatErrorMessage(url: string, error: any, requestBody: any) {
protected formatErrorMessage(url: string, error: any, requestBody: any) {
const errorDetails = {
url: url,
message: error.message,

View File

@ -1,6 +1,6 @@
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import AxiosProxy from '@main/services/AxiosProxy'
import { KnowledgeBaseParams } from '@types'
import axios from 'axios'
import BaseReranker from './BaseReranker'
@ -10,16 +10,7 @@ export default class JinaReranker extends BaseReranker {
}
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
? this.base.rerankBaseURL.slice(0, -1)
: this.base.rerankBaseURL
// The URL must include /v1, otherwise the request returns 404
if (baseURL && !baseURL.endsWith('/v1')) {
baseURL = `${baseURL}/v1`
}
const url = `${baseURL}/rerank`
const url = this.getRerankUrl()
const requestBody = {
model: this.base.rerankModel,
@ -29,26 +20,12 @@ export default class JinaReranker extends BaseReranker {
}
try {
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
const { data } = await AxiosProxy.axios.post(url, requestBody, { headers: this.defaultHeaders() })
const rerankResults = data.results
console.log(rerankResults)
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
return searchResults
.map((doc: ExtractChunkData, index: number) => {
const score = resultMap.get(index)
if (score === undefined) return undefined
return {
...doc,
score
}
})
.filter((doc): doc is ExtractChunkData => doc !== undefined)
.sort((a, b) => b.score - a.score)
return this.getRerankResult(searchResults, rerankResults)
} catch (error: any) {
const errorDetails = this.formatErrorMessage(url, error, requestBody)
console.error('Jina Reranker API Error:', errorDetails)
throw new Error(`Rerank request failed: ${error.message}\nRequest details: ${errorDetails}`)
}

View File

@ -1,6 +1,6 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import axiosProxy from '@main/services/AxiosProxy'
import { KnowledgeBaseParams } from '@types'
import axios from 'axios'
import BaseReranker from './BaseReranker'
@ -10,16 +10,7 @@ export default class SiliconFlowReranker extends BaseReranker {
}
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
? this.base.rerankBaseURL.slice(0, -1)
: this.base.rerankBaseURL
// The URL must include /v1, otherwise the request returns 404
if (baseURL && !baseURL.endsWith('/v1')) {
baseURL = `${baseURL}/v1`
}
const url = `${baseURL}/rerank`
const url = this.getRerankUrl()
const requestBody = {
model: this.base.rerankModel,
@ -31,23 +22,10 @@ export default class SiliconFlowReranker extends BaseReranker {
}
try {
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
const { data } = await axiosProxy.axios.post(url, requestBody, { headers: this.defaultHeaders() })
const rerankResults = data.results
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
return searchResults
.map((doc: ExtractChunkData, index: number) => {
const score = resultMap.get(index)
if (score === undefined) return undefined
return {
...doc,
score
}
})
.filter((doc): doc is ExtractChunkData => doc !== undefined)
.sort((a, b) => b.score - a.score)
return this.getRerankResult(searchResults, rerankResults)
} catch (error: any) {
const errorDetails = this.formatErrorMessage(url, error, requestBody)

View File

@ -1,6 +1,6 @@
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import axiosProxy from '@main/services/AxiosProxy'
import { KnowledgeBaseParams } from '@types'
import axios from 'axios'
import BaseReranker from './BaseReranker'
@ -10,15 +10,7 @@ export default class VoyageReranker extends BaseReranker {
}
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
? this.base.rerankBaseURL.slice(0, -1)
: this.base.rerankBaseURL
if (baseURL && !baseURL.endsWith('/v1')) {
baseURL = `${baseURL}/v1`
}
const url = `${baseURL}/rerank`
const url = this.getRerankUrl()
const requestBody = {
model: this.base.rerankModel,
@ -30,28 +22,14 @@ export default class VoyageReranker extends BaseReranker {
}
try {
const { data } = await axios.post(url, requestBody, {
const { data } = await axiosProxy.axios.post(url, requestBody, {
headers: {
...this.defaultHeaders()
}
})
const rerankResults = data.data
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
return searchResults
.map((doc: ExtractChunkData, index: number) => {
const score = resultMap.get(index)
if (score === undefined) return undefined
return {
...doc,
score
}
})
.filter((doc): doc is ExtractChunkData => doc !== undefined)
.sort((a, b) => b.score - a.score)
return this.getRerankResult(searchResults, rerankResults)
} catch (error: any) {
const errorDetails = this.formatErrorMessage(url, error, requestBody)

View File

@ -1,9 +1,11 @@
import { IpcChannel } from '@shared/IpcChannel'
import { UpdateInfo } from 'builder-util-runtime'
import { app, BrowserWindow, dialog } from 'electron'
import logger from 'electron-log'
import { AppUpdater as _AppUpdater, autoUpdater } from 'electron-updater'
import icon from '../../../build/icon.png?asset'
import { configManager } from './ConfigManager'
export default class AppUpdater {
autoUpdater: _AppUpdater = autoUpdater
@ -14,7 +16,8 @@ export default class AppUpdater {
autoUpdater.logger = logger
autoUpdater.forceDevUpdateConfig = !app.isPackaged
autoUpdater.autoDownload = true
autoUpdater.autoDownload = configManager.getAutoUpdate()
autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate()
// Listen for download errors
autoUpdater.on('error', (error) => {
@ -24,27 +27,27 @@ export default class AppUpdater {
stack: error.stack,
time: new Date().toISOString()
})
mainWindow.webContents.send('update-error', error)
mainWindow.webContents.send(IpcChannel.UpdateError, error)
})
autoUpdater.on('update-available', (releaseInfo: UpdateInfo) => {
logger.info('New version detected', releaseInfo)
mainWindow.webContents.send('update-available', releaseInfo)
mainWindow.webContents.send(IpcChannel.UpdateAvailable, releaseInfo)
})
// Fired when no update is available
autoUpdater.on('update-not-available', () => {
mainWindow.webContents.send('update-not-available')
mainWindow.webContents.send(IpcChannel.UpdateNotAvailable)
})
// Report download progress
autoUpdater.on('download-progress', (progress) => {
mainWindow.webContents.send('download-progress', progress)
mainWindow.webContents.send(IpcChannel.DownloadProgress, progress)
})
// Fired after the update has finished downloading
autoUpdater.on('update-downloaded', (releaseInfo: UpdateInfo) => {
mainWindow.webContents.send('update-downloaded', releaseInfo)
mainWindow.webContents.send(IpcChannel.UpdateDownloaded, releaseInfo)
this.releaseInfo = releaseInfo
logger.info('Download complete', releaseInfo)
})
@ -52,6 +55,11 @@ export default class AppUpdater {
this.autoUpdater = autoUpdater
}
public setAutoUpdate(isActive: boolean) {
autoUpdater.autoDownload = isActive
autoUpdater.autoInstallOnAppQuit = isActive
}
public async showUpdateDialog(mainWindow: BrowserWindow) {
if (!this.releaseInfo) {
return
@ -73,7 +81,7 @@ export default class AppUpdater {
app.isQuitting = true
setImmediate(() => autoUpdater.quitAndInstall())
} else {
mainWindow.webContents.send('update-downloaded-cancelled')
mainWindow.webContents.send(IpcChannel.UpdateDownloadedCancelled)
}
})
}
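A small sketch, not from this diff, of applying the new auto-update preference at runtime; the helper name and import paths are assumptions, while setAutoUpdate on both objects is the API added above.

import AppUpdater from './AppUpdater'
import { configManager } from './ConfigManager'

function applyAutoUpdatePreference(appUpdater: AppUpdater, enabled: boolean) {
  configManager.setAutoUpdate(enabled) // persist the preference
  appUpdater.setAutoUpdate(enabled) // apply it to the live autoUpdater instance
}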

View File

@ -0,0 +1,29 @@
import { AxiosInstance, default as axios_ } from 'axios'
import { ProxyAgent } from 'proxy-agent'
import { proxyManager } from './ProxyManager'
class AxiosProxy {
private cacheAxios: AxiosInstance | null = null
private proxyAgent: ProxyAgent | null = null
get axios(): AxiosInstance {
const currentProxyAgent = proxyManager.getProxyAgent()
// Recreate the axios instance if the proxy has changed or has not been initialized yet
if (this.cacheAxios === null || (currentProxyAgent !== null && this.proxyAgent !== currentProxyAgent)) {
this.proxyAgent = currentProxyAgent
// Create an axios instance with the proxy configuration applied
this.cacheAxios = axios_.create({
proxy: false,
httpAgent: currentProxyAgent || undefined,
httpsAgent: currentProxyAgent || undefined
})
}
return this.cacheAxios
}
}
export default new AxiosProxy()
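A usage sketch mirroring how the proxied client is consumed elsewhere in this commit (the rerankers and CopilotService); the URL is a placeholder.

import AxiosProxy from './AxiosProxy'

async function fetchThroughProxy() {
  // Each call goes through the cached instance, which is rebuilt whenever the proxy agent changes
  const { data } = await AxiosProxy.axios.get('https://example.com/health')
  return data
}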

View File

@ -1,8 +1,10 @@
import { IpcChannel } from '@shared/IpcChannel'
import { WebDavConfig } from '@types'
import AdmZip from 'adm-zip'
import archiver from 'archiver'
import { exec } from 'child_process'
import { app } from 'electron'
import Logger from 'electron-log'
import extract from 'extract-zip'
import * as fs from 'fs-extra'
import * as path from 'path'
import { createClient, CreateDirectoryOptions, FileStat } from 'webdav'
@ -21,6 +23,7 @@ class BackupManager {
this.backupToWebdav = this.backupToWebdav.bind(this)
this.restoreFromWebdav = this.restoreFromWebdav.bind(this)
this.listWebdavFiles = this.listWebdavFiles.bind(this)
this.deleteWebdavFile = this.deleteWebdavFile.bind(this)
}
private async setWritableRecursive(dirPath: string): Promise<void> {
@ -79,7 +82,7 @@ class BackupManager {
const mainWindow = windowService.getMainWindow()
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
mainWindow?.webContents.send('backup-progress', processData)
mainWindow?.webContents.send(IpcChannel.BackupProgress, processData)
Logger.log('[BackupManager] backup progress', processData)
}
@ -87,9 +90,18 @@ class BackupManager {
await fs.ensureDir(this.tempDir)
onProgress({ stage: 'preparing', progress: 0, total: 100 })
// Write data to a temporary file
// Write data.json using a stream
const tempDataPath = path.join(this.tempDir, 'data.json')
await fs.writeFile(tempDataPath, data)
await new Promise<void>((resolve, reject) => {
const writeStream = fs.createWriteStream(tempDataPath)
writeStream.write(data)
writeStream.end()
writeStream.on('finish', () => resolve())
writeStream.on('error', (error) => reject(error))
})
onProgress({ stage: 'writing_data', progress: 20, total: 100 })
// Copy the Data directory to the temporary directory
@ -103,18 +115,92 @@ class BackupManager {
// Copy files using streams
await this.copyDirWithProgress(sourcePath, tempDataDir, (size) => {
copiedSize += size
const progress = Math.min(80, 20 + Math.floor((copiedSize / totalSize) * 60))
const progress = Math.min(50, Math.floor((copiedSize / totalSize) * 50))
onProgress({ stage: 'copying_files', progress, total: 100 })
})
await this.setWritableRecursive(tempDataDir)
onProgress({ stage: 'compressing', progress: 80, total: 100 })
onProgress({ stage: 'preparing_compression', progress: 50, total: 100 })
// Create the archive with adm-zip
const zip = new AdmZip()
zip.addLocalFolder(this.tempDir)
// Create the output file stream
const backupedFilePath = path.join(destinationPath, fileName)
zip.writeZip(backupedFilePath)
const output = fs.createWriteStream(backupedFilePath)
// Create an archiver instance with ZIP64 support enabled
const archive = archiver('zip', {
zlib: { level: 1 }, // Use the lowest compression level for speed
zip64: true // Enable ZIP64 support for large files
})
let lastProgress = 50
let totalEntries = 0
let processedEntries = 0
let totalBytes = 0
let processedBytes = 0
// First calculate the total number of files and the total size
const calculateTotals = async (dirPath: string) => {
const items = await fs.readdir(dirPath, { withFileTypes: true })
for (const item of items) {
const fullPath = path.join(dirPath, item.name)
if (item.isDirectory()) {
await calculateTotals(fullPath)
} else {
totalEntries++
const stats = await fs.stat(fullPath)
totalBytes += stats.size
}
}
}
await calculateTotals(this.tempDir)
// Listen for entry (file added) events
archive.on('entry', () => {
processedEntries++
if (totalEntries > 0) {
const progressPercent = Math.min(55, 50 + Math.floor((processedEntries / totalEntries) * 5))
if (progressPercent > lastProgress) {
lastProgress = progressPercent
onProgress({ stage: 'compressing', progress: progressPercent, total: 100 })
}
}
})
// Listen for data write events
archive.on('data', (chunk) => {
processedBytes += chunk.length
if (totalBytes > 0) {
const progressPercent = Math.min(99, 55 + Math.floor((processedBytes / totalBytes) * 44))
if (progressPercent > lastProgress) {
lastProgress = progressPercent
onProgress({ stage: 'compressing', progress: progressPercent, total: 100 })
}
}
})
// Wait for compression to finish via a Promise
await new Promise<void>((resolve, reject) => {
output.on('close', () => {
onProgress({ stage: 'compressing', progress: 100, total: 100 })
resolve()
})
archive.on('error', reject)
archive.on('warning', (err: any) => {
if (err.code !== 'ENOENT') {
Logger.warn('[BackupManager] Archive warning:', err)
}
})
// Pipe the archive output to the file stream
archive.pipe(output)
// Add the entire temporary directory to the archive
archive.directory(this.tempDir, false)
// Finalize the archive
archive.finalize()
})
// Clean up the temporary directory
await fs.remove(this.tempDir)
@ -124,6 +210,8 @@ class BackupManager {
return backupedFilePath
} catch (error) {
Logger.error('[BackupManager] Backup failed:', error)
// Make sure the temporary directory is cleaned up
await fs.remove(this.tempDir).catch(() => {})
throw error
}
}
@ -132,7 +220,7 @@ class BackupManager {
const mainWindow = windowService.getMainWindow()
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
mainWindow?.webContents.send('restore-progress', processData)
mainWindow?.webContents.send(IpcChannel.RestoreProgress, processData)
Logger.log('[BackupManager] restore progress', processData)
}
@ -142,16 +230,22 @@ class BackupManager {
onProgress({ stage: 'preparing', progress: 0, total: 100 })
Logger.log('[backup] step 1: unzip backup file', this.tempDir)
// Extract with adm-zip
const zip = new AdmZip(backupPath)
zip.extractAllTo(this.tempDir, true) // true means overwrite existing files
onProgress({ stage: 'extracting', progress: 20, total: 100 })
// Extract with extract-zip
await extract(backupPath, {
dir: this.tempDir,
onEntry: () => {
// Progress could be reported here, but extract-zip does not expose the total entry count
onProgress({ stage: 'extracting', progress: 15, total: 100 })
}
})
onProgress({ stage: 'extracting', progress: 25, total: 100 })
Logger.log('[backup] step 2: read data.json')
// Read data.json
const dataPath = path.join(this.tempDir, 'data.json')
const data = await fs.readFile(dataPath, 'utf-8')
onProgress({ stage: 'reading_data', progress: 40, total: 100 })
onProgress({ stage: 'reading_data', progress: 35, total: 100 })
Logger.log('[backup] step 3: restore Data directory')
// Restore the Data directory
@ -168,7 +262,7 @@ class BackupManager {
// Copy files using streams
await this.copyDirWithProgress(sourcePath, destPath, (size) => {
copiedSize += size
const progress = Math.min(90, 40 + Math.floor((copiedSize / totalSize) * 50))
const progress = Math.min(85, 35 + Math.floor((copiedSize / totalSize) * 50))
onProgress({ stage: 'copying_files', progress, total: 100 })
})
@ -208,8 +302,15 @@ class BackupManager {
fs.mkdirSync(this.backupDir, { recursive: true })
}
// writeFileSync is synchronous, no await needed
fs.writeFileSync(backupedFilePath, retrievedFile as Buffer)
// Write the file using a stream
await new Promise<void>((resolve, reject) => {
const writeStream = fs.createWriteStream(backupedFilePath)
writeStream.write(retrievedFile as Buffer)
writeStream.end()
writeStream.on('finish', () => resolve())
writeStream.on('error', (error) => reject(error))
})
return await this.restore(_, backupedFilePath)
} catch (error: any) {
@ -294,6 +395,16 @@ class BackupManager {
const webdavClient = new WebDav(webdavConfig)
return await webdavClient.createDirectory(path, options)
}
async deleteWebdavFile(_: Electron.IpcMainInvokeEvent, fileName: string, webdavConfig: WebDavConfig) {
try {
const webdavClient = new WebDav(webdavConfig)
return await webdavClient.deleteFile(fileName)
} catch (error: any) {
Logger.error('Failed to delete WebDAV file:', error)
throw new Error(error.message || 'Failed to delete backup file')
}
}
}
export default BackupManager
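A hedged sketch of exposing the new deleteWebdavFile handler over IPC; the channel name and the BackupManager instantiation are assumptions, not part of this diff.

import { ipcMain } from 'electron'

import BackupManager from './BackupManager'

const backupManager = new BackupManager()
// deleteWebdavFile is bound to the instance in the constructor, so it can be passed directly
ipcMain.handle('backup:delete-webdav-file', backupManager.deleteWebdavFile)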

View File

@ -1,10 +1,24 @@
import { ZOOM_SHORTCUTS } from '@shared/config/constant'
import { defaultLanguage, ZOOM_SHORTCUTS } from '@shared/config/constant'
import { LanguageVarious, Shortcut, ThemeMode } from '@types'
import { app } from 'electron'
import Store from 'electron-store'
import { locales } from '../utils/locales'
enum ConfigKeys {
Language = 'language',
Theme = 'theme',
LaunchToTray = 'launchToTray',
Tray = 'tray',
TrayOnClose = 'trayOnClose',
ZoomFactor = 'zoomFactor', // keep the original store key so existing zoom settings and subscribers keep working
Shortcuts = 'shortcuts',
ClickTrayToShowQuickAssistant = 'clickTrayToShowQuickAssistant',
EnableQuickAssistant = 'enableQuickAssistant',
AutoUpdate = 'autoUpdate',
EnableDataCollection = 'enableDataCollection'
}
export class ConfigManager {
private store: Store
private subscribers: Map<string, Array<(newValue: any) => void>> = new Map()
@ -14,54 +28,62 @@ export class ConfigManager {
}
getLanguage(): LanguageVarious {
const locale = Object.keys(locales).includes(app.getLocale()) ? app.getLocale() : 'en-US'
return this.store.get('language', locale) as LanguageVarious
const locale = Object.keys(locales).includes(app.getLocale()) ? app.getLocale() : defaultLanguage
return this.get(ConfigKeys.Language, locale) as LanguageVarious
}
setLanguage(theme: LanguageVarious) {
this.store.set('language', theme)
this.set(ConfigKeys.Language, theme)
}
getTheme(): ThemeMode {
return this.store.get('theme', ThemeMode.light) as ThemeMode
return this.get(ConfigKeys.Theme, ThemeMode.light)
}
setTheme(theme: ThemeMode) {
this.store.set('theme', theme)
this.set(ConfigKeys.Theme, theme)
}
getCustomCss(): string {
return this.store.get('customCss', '') as string
}
setCustomCss(css: string) {
this.store.set('customCss', css)
}
getLaunchToTray(): boolean {
return !!this.store.get('launchToTray', false)
return !!this.get(ConfigKeys.LaunchToTray, false)
}
setLaunchToTray(value: boolean) {
this.store.set('launchToTray', value)
this.set(ConfigKeys.LaunchToTray, value)
}
getTray(): boolean {
return !!this.store.get('tray', true)
return !!this.get(ConfigKeys.Tray, true)
}
setTray(value: boolean) {
this.store.set('tray', value)
this.notifySubscribers('tray', value)
this.set(ConfigKeys.Tray, value)
this.notifySubscribers(ConfigKeys.Tray, value)
}
getTrayOnClose(): boolean {
return !!this.store.get('trayOnClose', true)
return !!this.get(ConfigKeys.TrayOnClose, true)
}
setTrayOnClose(value: boolean) {
this.store.set('trayOnClose', value)
this.set(ConfigKeys.TrayOnClose, value)
}
getZoomFactor(): number {
return this.store.get('zoomFactor', 1) as number
return this.get<number>(ConfigKeys.ZoomFactor, 1)
}
setZoomFactor(factor: number) {
this.store.set('zoomFactor', factor)
this.notifySubscribers('zoomFactor', factor)
this.set(ConfigKeys.ZoomFactor, factor)
this.notifySubscribers(ConfigKeys.ZoomFactor, factor)
}
subscribe<T>(key: string, callback: (newValue: T) => void) {
@ -89,39 +111,55 @@ export class ConfigManager {
}
getShortcuts() {
return this.store.get('shortcuts', ZOOM_SHORTCUTS) as Shortcut[] | []
return this.get(ConfigKeys.Shortcuts, ZOOM_SHORTCUTS) as Shortcut[] | []
}
setShortcuts(shortcuts: Shortcut[]) {
this.store.set(
'shortcuts',
this.set(
ConfigKeys.Shortcuts,
shortcuts.filter((shortcut) => shortcut.system)
)
this.notifySubscribers('shortcuts', shortcuts)
this.notifySubscribers(ConfigKeys.Shortcuts, shortcuts)
}
getClickTrayToShowQuickAssistant(): boolean {
return this.store.get('clickTrayToShowQuickAssistant', false) as boolean
return this.get<boolean>(ConfigKeys.ClickTrayToShowQuickAssistant, false)
}
setClickTrayToShowQuickAssistant(value: boolean) {
this.store.set('clickTrayToShowQuickAssistant', value)
this.set(ConfigKeys.ClickTrayToShowQuickAssistant, value)
}
getEnableQuickAssistant(): boolean {
return this.store.get('enableQuickAssistant', false) as boolean
return this.get(ConfigKeys.EnableQuickAssistant, false)
}
setEnableQuickAssistant(value: boolean) {
this.store.set('enableQuickAssistant', value)
this.set(ConfigKeys.EnableQuickAssistant, value)
}
set(key: string, value: any) {
getAutoUpdate(): boolean {
return this.get<boolean>(ConfigKeys.AutoUpdate, true)
}
setAutoUpdate(value: boolean) {
this.set(ConfigKeys.AutoUpdate, value)
}
getEnableDataCollection(): boolean {
return this.get<boolean>(ConfigKeys.EnableDataCollection, true)
}
setEnableDataCollection(value: boolean) {
this.set(ConfigKeys.EnableDataCollection, value)
}
set(key: string, value: unknown) {
this.store.set(key, value)
}
get(key: string) {
return this.store.get(key)
get<T>(key: string, defaultValue?: T) {
return this.store.get(key, defaultValue) as T
}
}

View File

@ -1,8 +1,10 @@
import axios, { AxiosRequestConfig } from 'axios'
import { AxiosRequestConfig } from 'axios'
import { app, safeStorage } from 'electron'
import fs from 'fs/promises'
import path from 'path'
import aoxisProxy from './AxiosProxy'
// Configuration constants, kept in one place
const CONFIG = {
GITHUB_CLIENT_ID: 'Iv1.b507a08c87ecfe98',
@ -93,7 +95,7 @@ class CopilotService {
}
}
const response = await axios.get(CONFIG.API_URLS.GITHUB_USER, config)
const response = await aoxisProxy.axios.get(CONFIG.API_URLS.GITHUB_USER, config)
return {
login: response.data.login,
avatar: response.data.avatar_url
@ -114,7 +116,7 @@ class CopilotService {
try {
this.updateHeaders(headers)
const response = await axios.post<AuthResponse>(
const response = await aoxisProxy.axios.post<AuthResponse>(
CONFIG.API_URLS.GITHUB_DEVICE_CODE,
{
client_id: CONFIG.GITHUB_CLIENT_ID,
@ -146,7 +148,7 @@ class CopilotService {
await this.delay(currentDelay)
try {
const response = await axios.post<TokenResponse>(
const response = await aoxisProxy.axios.post<TokenResponse>(
CONFIG.API_URLS.GITHUB_ACCESS_TOKEN,
{
client_id: CONFIG.GITHUB_CLIENT_ID,
@ -208,7 +210,7 @@ class CopilotService {
}
}
const response = await axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
const response = await aoxisProxy.axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
return response.data
} catch (error) {

View File

@ -1,5 +1,5 @@
import { getFilesDir, getFileType, getTempDir } from '@main/utils/file'
import { documentExts, imageExts } from '@shared/config/constant'
import { documentExts, imageExts, MB } from '@shared/config/constant'
import { FileType } from '@types'
import * as crypto from 'crypto'
import {
@ -122,7 +122,7 @@ class FileStorage {
private async compressImage(sourcePath: string, destPath: string): Promise<void> {
try {
const stats = fs.statSync(sourcePath)
const fileSizeInMB = stats.size / (1024 * 1024)
const fileSizeInMB = stats.size / MB
// Only compress images larger than 1 MB
if (fileSizeInMB > 1) {

View File

@ -1,4 +1,4 @@
import { FileMetadataResponse, FileState, GoogleAIFileManager } from '@google/generative-ai/server'
import { File, FileState, GoogleGenAI, Pager } from '@google/genai'
import { FileType } from '@types'
import fs from 'fs'
@ -8,11 +8,15 @@ export class GeminiService {
private static readonly FILE_LIST_CACHE_KEY = 'gemini_file_list'
private static readonly CACHE_DURATION = 3000
static async uploadFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string) {
const fileManager = new GoogleAIFileManager(apiKey)
const uploadResult = await fileManager.uploadFile(file.path, {
mimeType: 'application/pdf',
displayName: file.origin_name
static async uploadFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string): Promise<File> {
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
const uploadResult = await sdk.files.upload({
file: file.path,
config: {
mimeType: 'application/pdf',
name: file.id,
displayName: file.origin_name
}
})
return uploadResult
}
@ -24,40 +28,42 @@ export class GeminiService {
}
}
static async retrieveFile(
_: Electron.IpcMainInvokeEvent,
file: FileType,
apiKey: string
): Promise<FileMetadataResponse | undefined> {
const fileManager = new GoogleAIFileManager(apiKey)
static async retrieveFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string): Promise<File | undefined> {
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
const cachedResponse = CacheService.get<any>(GeminiService.FILE_LIST_CACHE_KEY)
if (cachedResponse) {
return GeminiService.processResponse(cachedResponse, file)
}
const response = await fileManager.listFiles()
const response = await sdk.files.list()
CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, response, GeminiService.CACHE_DURATION)
return GeminiService.processResponse(response, file)
}
private static processResponse(response: any, file: FileType) {
if (response.files) {
return response.files
.filter((file) => file.state === FileState.ACTIVE)
.find((i) => i.displayName === file.origin_name && Number(i.sizeBytes) === file.size)
private static async processResponse(response: Pager<File>, file: FileType) {
for await (const f of response) {
if (f.state === FileState.ACTIVE) {
if (f.displayName === file.origin_name && Number(f.sizeBytes) === file.size) {
return f
}
}
}
return undefined
}
static async listFiles(_: Electron.IpcMainInvokeEvent, apiKey: string) {
const fileManager = new GoogleAIFileManager(apiKey)
return await fileManager.listFiles()
static async listFiles(_: Electron.IpcMainInvokeEvent, apiKey: string): Promise<File[]> {
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
const files: File[] = []
for await (const f of await sdk.files.list()) {
files.push(f)
}
return files
}
static async deleteFile(_: Electron.IpcMainInvokeEvent, apiKey: string, fileId: string) {
const fileManager = new GoogleAIFileManager(apiKey)
await fileManager.deleteFile(fileId)
static async deleteFile(_: Electron.IpcMainInvokeEvent, fileId: string, apiKey: string) {
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
await sdk.files.delete({ name: fileId })
}
}

View File

@ -26,7 +26,9 @@ import { addFileLoader } from '@main/loader'
import Reranker from '@main/reranker/Reranker'
import { windowService } from '@main/services/WindowService'
import { getAllFiles } from '@main/utils/file'
import { MB } from '@shared/config/constant'
import type { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
import { app } from 'electron'
import Logger from 'electron-log'
@ -91,7 +93,7 @@ class KnowledgeService {
private workload = 0
private processingItemCount = 0
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private static MAXIMUM_WORKLOAD = 1024 * 1024 * 80
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
@ -194,7 +196,7 @@ class KnowledgeService {
const sendDirectoryProcessingPercent = (totalFiles: number, processedFiles: number) => {
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('directory-processing-percent', {
mainWindow?.webContents.send(IpcChannel.DirectoryProcessingPercent, {
itemId: item.id,
percent: (processedFiles / totalFiles) * 100
})
@ -270,7 +272,7 @@ class KnowledgeService {
return KnowledgeService.ERROR_LOADER_RETURN
})
},
evaluateTaskWorkload: { workload: 1024 * 1024 * 2 }
evaluateTaskWorkload: { workload: 2 * MB }
}
],
loaderDoneReturn: null
@ -309,7 +311,7 @@ class KnowledgeService {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
}),
evaluateTaskWorkload: { workload: 1024 * 1024 * 20 }
evaluateTaskWorkload: { workload: 20 * MB }
}
],
loaderDoneReturn: null

View File

@ -1,16 +1,69 @@
import crypto from 'node:crypto'
import fs from 'node:fs'
import os from 'node:os'
import path from 'node:path'
import { isLinux, isMac, isWin } from '@main/constant'
import { createInMemoryMCPServer } from '@main/mcpServers/factory'
import { makeSureDirExists } from '@main/utils'
import { getBinaryName, getBinaryPath } from '@main/utils/process'
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js'
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'
import { SSEClientTransport, SSEClientTransportOptions } from '@modelcontextprotocol/sdk/client/sse.js'
import { getDefaultEnvironment, StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory'
import { nanoid } from '@reduxjs/toolkit'
import { MCPServer, MCPTool } from '@types'
import {
GetMCPPromptResponse,
GetResourceResponse,
MCPCallToolResponse,
MCPPrompt,
MCPResource,
MCPServer,
MCPTool
} from '@types'
import { app } from 'electron'
import Logger from 'electron-log'
import { EventEmitter } from 'events'
import { memoize } from 'lodash'
import { CacheService } from './CacheService'
import { CallBackServer } from './mcp/oauth/callback'
import { McpOAuthClientProvider } from './mcp/oauth/provider'
import { StreamableHTTPClientTransport, type StreamableHTTPClientTransportOptions } from './MCPStreamableHttpClient'
// Generic type for caching wrapped functions
type CachedFunction<T extends unknown[], R> = (...args: T) => Promise<R>
/**
* Higher-order function to add caching capability to any async function
* @param fn The original function to be wrapped with caching
* @param getCacheKey Function to generate a cache key from the function arguments
* @param ttl Time to live for the cache entry in milliseconds
* @param logPrefix Prefix for log messages
* @returns The wrapped function with caching capability
*/
function withCache<T extends unknown[], R>(
fn: (...args: T) => Promise<R>,
getCacheKey: (...args: T) => string,
ttl: number,
logPrefix: string
): CachedFunction<T, R> {
return async (...args: T): Promise<R> => {
const cacheKey = getCacheKey(...args)
if (CacheService.has(cacheKey)) {
Logger.info(`${logPrefix} loaded from cache`)
const cachedData = CacheService.get<R>(cacheKey)
if (cachedData) {
return cachedData
}
}
const result = await fn(...args)
CacheService.set(cacheKey, result, ttl)
return result
}
}
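// Illustrative usage sketch (not part of this change; `fetchGreeting` and the 1 minute TTL
// are hypothetical): withCache can wrap any async function, in the same way listTools and
// listPrompts use it further down.
//
// const fetchGreeting = async (name: string): Promise<string> => `Hello, ${name}`
// const cachedFetchGreeting = withCache<[string], string>(
//   fetchGreeting,
//   (name) => `example:greeting:${name}`,
//   60 * 1000, // 1 minute TTL
//   '[Example] Greeting'
// )
// // The first call runs fetchGreeting and caches the result; later calls within the TTL hit the cache.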
class McpService {
private clients: Map<string, Client> = new Map()
@ -20,6 +73,7 @@ class McpService {
baseUrl: server.baseUrl,
command: server.command,
args: server.args,
registryUrl: server.registryUrl,
env: server.env,
id: server.id
})
@ -29,10 +83,15 @@ class McpService {
this.initClient = this.initClient.bind(this)
this.listTools = this.listTools.bind(this)
this.callTool = this.callTool.bind(this)
this.listPrompts = this.listPrompts.bind(this)
this.getPrompt = this.getPrompt.bind(this)
this.listResources = this.listResources.bind(this)
this.getResource = this.getResource.bind(this)
this.closeClient = this.closeClient.bind(this)
this.removeServer = this.removeServer.bind(this)
this.restartServer = this.restartServer.bind(this)
this.stopServer = this.stopServer.bind(this)
this.cleanup = this.cleanup.bind(this)
}
async initClient(server: MCPServer): Promise<Client> {
@ -41,39 +100,78 @@ class McpService {
// Check if we already have a client for this server configuration
const existingClient = this.clients.get(serverKey)
if (existingClient) {
// Check if the existing client is still connected
const pingResult = await existingClient.ping()
Logger.info(`[MCP] Ping result for ${server.name}:`, pingResult)
// If the ping fails, remove the client from the cache
// and create a new one
if (!pingResult) {
try {
// Check if the existing client is still connected
const pingResult = await existingClient.ping()
Logger.info(`[MCP] Ping result for ${server.name}:`, pingResult)
// If the ping fails, remove the client from the cache
// and create a new one
if (!pingResult) {
this.clients.delete(serverKey)
} else {
return existingClient
}
} catch (error) {
Logger.error(`[MCP] Error pinging server ${server.name}:`, error)
this.clients.delete(serverKey)
} else {
return existingClient
}
}
// Create new client instance for each connection
const client = new Client({ name: 'Cherry Studio', version: app.getVersion() }, { capabilities: {} })
const args = [...(server.args || [])]
let transport: StdioClientTransport | SSEClientTransport
// let transport: StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
const authProvider = new McpOAuthClientProvider({
serverUrlHash: crypto
.createHash('md5')
.update(server.baseUrl || '')
.digest('hex')
})
try {
const initTransport = async (): Promise<
StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
> => {
// Create appropriate transport based on configuration
if (server.baseUrl) {
transport = new SSEClientTransport(new URL(server.baseUrl))
if (server.type === 'inMemory') {
Logger.info(`[MCP] Using in-memory transport for server: ${server.name}`)
const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
// start the in-memory server with the given name and environment variables
const inMemoryServer = createInMemoryMCPServer(server.name, args, server.env || {})
try {
await inMemoryServer.connect(serverTransport)
Logger.info(`[MCP] In-memory server started: ${server.name}`)
} catch (error: Error | any) {
Logger.error(`[MCP] Error starting in-memory server: ${error}`)
throw new Error(`Failed to start in-memory server: ${error.message}`)
}
// set the client transport to the client
return clientTransport
} else if (server.baseUrl) {
if (server.type === 'streamableHttp') {
const options: StreamableHTTPClientTransportOptions = {
requestInit: {
headers: server.headers || {}
},
authProvider
}
return new StreamableHTTPClientTransport(new URL(server.baseUrl!), options)
} else if (server.type === 'sse') {
const options: SSEClientTransportOptions = {
requestInit: {
headers: server.headers || {}
},
authProvider
}
return new SSEClientTransport(new URL(server.baseUrl!), options)
} else {
throw new Error('Invalid server type')
}
} else if (server.command) {
let cmd = server.command
if (server.command === 'npx') {
cmd = await getBinaryPath('bun')
if (cmd === 'bun') {
cmd = 'npx'
}
Logger.info(`[MCP] Using command: ${cmd}`)
// add -x to args if args exist
@ -81,28 +179,113 @@ class McpService {
if (!args.includes('-y')) {
!args.includes('-y') && args.unshift('-y')
}
if (cmd.includes('bun') && !args.includes('x')) {
if (!args.includes('x')) {
args.unshift('x')
}
}
if (server.registryUrl) {
server.env = {
...server.env,
NPM_CONFIG_REGISTRY: server.registryUrl
}
// if the server name is mcp-auto-install, use the mcp-registry.json file in the bin directory
if (server.name.includes('mcp-auto-install')) {
const binPath = await getBinaryPath()
makeSureDirExists(binPath)
server.env.MCP_REGISTRY_PATH = path.join(binPath, '..', 'config', 'mcp-registry.json')
}
}
} else if (server.command === 'uvx' || server.command === 'uv') {
cmd = await getBinaryPath(server.command)
if (server.registryUrl) {
server.env = {
...server.env,
UV_DEFAULT_INDEX: server.registryUrl,
PIP_INDEX_URL: server.registryUrl
}
}
}
if (server.command === 'uvx') {
cmd = await getBinaryPath('uvx')
}
Logger.info(`[MCP] Starting server with command: ${cmd} ${args ? args.join(' ') : ''}`)
// Logger.info(`[MCP] Environment variables for server:`, server.env)
transport = new StdioClientTransport({
const stdioTransport = new StdioClientTransport({
command: cmd,
args,
env: server.env
env: {
...getDefaultEnvironment(),
PATH: await this.getEnhancedPath(process.env.PATH || ''),
...server.env
},
stderr: 'pipe'
})
stdioTransport.stderr?.on('data', (data) =>
Logger.info(`[MCP] Stdio stderr for server: ${server.name} `, data.toString())
)
return stdioTransport
} else {
throw new Error('Either baseUrl or command must be provided')
}
}
await client.connect(transport)
const handleAuth = async (client: Client, transport: SSEClientTransport | StreamableHTTPClientTransport) => {
Logger.info(`[MCP] Starting OAuth flow for server: ${server.name}`)
// Create an event emitter for the OAuth callback
const events = new EventEmitter()
// Create a callback server
const callbackServer = new CallBackServer({
port: authProvider.config.callbackPort,
path: authProvider.config.callbackPath || '/oauth/callback',
events
})
// Set a timeout to close the callback server
const timeoutId = setTimeout(() => {
Logger.warn(`[MCP] OAuth flow timed out for server: ${server.name}`)
callbackServer.close()
}, 300000) // 5 minutes timeout
try {
// Wait for the authorization code
const authCode = await callbackServer.waitForAuthCode()
Logger.info(`[MCP] Received auth code: ${authCode}`)
// Complete the OAuth flow
await transport.finishAuth(authCode)
Logger.info(`[MCP] OAuth flow completed for server: ${server.name}`)
const newTransport = await initTransport()
// Try to connect again
await client.connect(newTransport)
Logger.info(`[MCP] Successfully authenticated with server: ${server.name}`)
} catch (oauthError) {
Logger.error(`[MCP] OAuth authentication failed for server ${server.name}:`, oauthError)
throw new Error(
`OAuth authentication failed: ${oauthError instanceof Error ? oauthError.message : String(oauthError)}`
)
} finally {
// Clear the timeout and close the callback server
clearTimeout(timeoutId)
callbackServer.close()
}
}
try {
const transport = await initTransport()
try {
await client.connect(transport)
} catch (error: Error | any) {
if (error instanceof Error && (error.name === 'UnauthorizedError' || error.message.includes('Unauthorized'))) {
Logger.info(`[MCP] Authentication required for server: ${server.name}`)
await handleAuth(client, transport as SSEClientTransport | StreamableHTTPClientTransport)
} else {
throw error
}
}
// Store the new client in the cache
this.clients.set(serverKey, client)
@ -111,7 +294,7 @@ class McpService {
return client
} catch (error: any) {
Logger.error(`[MCP] Error activating server ${server.name}:`, error)
throw error
throw new Error(`[MCP] Error activating server ${server.name}: ${error.message}`)
}
}
@ -122,6 +305,8 @@ class McpService {
await client.close()
Logger.info(`[MCP] Closed server: ${serverKey}`)
this.clients.delete(serverKey)
CacheService.remove(`mcp:list_tool:${serverKey}`)
Logger.info(`[MCP] Cleared cache for server: ${serverKey}`)
} else {
Logger.warn(`[MCP] No client found for server: ${serverKey}`)
}
@ -148,30 +333,50 @@ class McpService {
await this.initClient(server)
}
async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
const client = await this.initClient(server)
const cacheKey = `mcp:list_tool:${server.id}`
if (CacheService.has(cacheKey)) {
Logger.info(`[MCP] Tools from ${server.name} loaded from cache`)
const cachedTools = CacheService.get<MCPTool[]>(cacheKey)
if (cachedTools && cachedTools.length > 0) {
return cachedTools
async cleanup() {
for (const [key] of this.clients) {
try {
await this.closeClient(key)
} catch (error) {
Logger.error(`[MCP] Failed to close client: ${error}`)
}
}
}
private async listToolsImpl(server: MCPServer): Promise<MCPTool[]> {
Logger.info(`[MCP] Listing tools for server: ${server.name}`)
const { tools } = await client.listTools()
const serverTools: MCPTool[] = []
tools.map((tool: any) => {
const serverTool: MCPTool = {
...tool,
id: nanoid(),
serverId: server.id,
serverName: server.name
}
serverTools.push(serverTool)
})
CacheService.set(cacheKey, serverTools, 5 * 60 * 1000)
return serverTools
const client = await this.initClient(server)
try {
const { tools } = await client.listTools()
const serverTools: MCPTool[] = []
tools.map((tool: any) => {
const serverTool: MCPTool = {
...tool,
id: `f${nanoid()}`,
serverId: server.id,
serverName: server.name
}
serverTools.push(serverTool)
})
return serverTools
} catch (error) {
Logger.error(`[MCP] Failed to list tools for server: ${server.name}`, error)
return []
}
}
async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
const cachedListTools = withCache<[MCPServer], MCPTool[]>(
this.listToolsImpl.bind(this),
(server) => {
const serverKey = this.getServerKey(server)
return `mcp:list_tool:${serverKey}`
},
5 * 60 * 1000, // 5 minutes TTL
`[MCP] Tools from ${server.name}`
)
return cachedListTools(server)
}
/**
@ -180,12 +385,12 @@ class McpService {
public async callTool(
_: Electron.IpcMainInvokeEvent,
{ server, name, args }: { server: MCPServer; name: string; args: any }
): Promise<any> {
): Promise<MCPCallToolResponse> {
try {
Logger.info('[MCP] Calling:', server.name, name, args)
const client = await this.initClient(server)
const result = await client.callTool({ name, arguments: args })
return result
return result as MCPCallToolResponse
} catch (error) {
Logger.error(`[MCP] Error calling tool ${name} on ${server.name}:`, error)
throw error
@ -200,6 +405,301 @@ class McpService {
const bunPath = path.join(dir, bunName)
return { dir, uvPath, bunPath }
}
/**
* List prompts available on an MCP server
*/
private async listPromptsImpl(server: MCPServer): Promise<MCPPrompt[]> {
Logger.info(`[MCP] Listing prompts for server: ${server.name}`)
const client = await this.initClient(server)
try {
const { prompts } = await client.listPrompts()
const serverPrompts = prompts.map((prompt: any) => ({
...prompt,
id: `p${nanoid()}`,
serverId: server.id,
serverName: server.name
}))
return serverPrompts
} catch (error) {
Logger.error(`[MCP] Failed to list prompts for server: ${server.name}`, error)
return []
}
}
/**
* List prompts available on an MCP server with caching
*/
public async listPrompts(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<MCPPrompt[]> {
const cachedListPrompts = withCache<[MCPServer], MCPPrompt[]>(
this.listPromptsImpl.bind(this),
(server) => {
const serverKey = this.getServerKey(server)
return `mcp:list_prompts:${serverKey}`
},
60 * 60 * 1000, // 60 minutes TTL
`[MCP] Prompts from ${server.name}`
)
return cachedListPrompts(server)
}
/**
* Get a specific prompt from an MCP server (implementation)
*/
private async getPromptImpl(
server: MCPServer,
name: string,
args?: Record<string, any>
): Promise<GetMCPPromptResponse> {
Logger.info(`[MCP] Getting prompt ${name} from server: ${server.name}`)
const client = await this.initClient(server)
return await client.getPrompt({ name, arguments: args })
}
/**
* Get a specific prompt from an MCP server with caching
*/
public async getPrompt(
_: Electron.IpcMainInvokeEvent,
{ server, name, args }: { server: MCPServer; name: string; args?: Record<string, any> }
): Promise<GetMCPPromptResponse> {
const cachedGetPrompt = withCache<[MCPServer, string, Record<string, any> | undefined], GetMCPPromptResponse>(
this.getPromptImpl.bind(this),
(server, name, args) => {
const serverKey = this.getServerKey(server)
const argsKey = args ? JSON.stringify(args) : 'no-args'
return `mcp:get_prompt:${serverKey}:${name}:${argsKey}`
},
30 * 60 * 1000, // 30 minutes TTL
`[MCP] Prompt ${name} from ${server.name}`
)
return await cachedGetPrompt(server, name, args)
}
/**
* List resources available on an MCP server (implementation)
*/
private async listResourcesImpl(server: MCPServer): Promise<MCPResource[]> {
Logger.info(`[MCP] Listing resources for server: ${server.name}`)
const client = await this.initClient(server)
try {
const result = await client.listResources()
const resources = result.resources || []
const serverResources = (Array.isArray(resources) ? resources : []).map((resource: any) => ({
...resource,
serverId: server.id,
serverName: server.name
}))
return serverResources
} catch (error) {
Logger.error(`[MCP] Failed to list resources for server: ${server.name}`, error)
return []
}
}
/**
* List resources available on an MCP server with caching
*/
public async listResources(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<MCPResource[]> {
const cachedListResources = withCache<[MCPServer], MCPResource[]>(
this.listResourcesImpl.bind(this),
(server) => {
const serverKey = this.getServerKey(server)
return `mcp:list_resources:${serverKey}`
},
60 * 60 * 1000, // 60 minutes TTL
`[MCP] Resources from ${server.name}`
)
return cachedListResources(server)
}
/**
* Get a specific resource from an MCP server (implementation)
*/
private async getResourceImpl(server: MCPServer, uri: string): Promise<GetResourceResponse> {
Logger.info(`[MCP] Getting resource ${uri} from server: ${server.name}`)
const client = await this.initClient(server)
try {
const result = await client.readResource({ uri: uri })
const contents: MCPResource[] = []
if (result.contents && result.contents.length > 0) {
result.contents.forEach((content: any) => {
contents.push({
...content,
serverId: server.id,
serverName: server.name
})
})
}
return {
contents: contents
}
} catch (error: Error | any) {
Logger.error(`[MCP] Failed to get resource ${uri} from server: ${server.name}`, error)
throw new Error(`Failed to get resource ${uri} from server: ${server.name}: ${error.message}`)
}
}
/**
* Get a specific resource from an MCP server with caching
*/
public async getResource(
_: Electron.IpcMainInvokeEvent,
{ server, uri }: { server: MCPServer; uri: string }
): Promise<GetResourceResponse> {
const cachedGetResource = withCache<[MCPServer, string], GetResourceResponse>(
this.getResourceImpl.bind(this),
(server, uri) => {
const serverKey = this.getServerKey(server)
return `mcp:get_resource:${serverKey}:${uri}`
},
30 * 60 * 1000, // 30 minutes TTL
`[MCP] Resource ${uri} from ${server.name}`
)
return await cachedGetResource(server, uri)
}
private getSystemPath = memoize(async (): Promise<string> => {
return new Promise((resolve, reject) => {
let command: string
let shell: string
if (process.platform === 'win32') {
shell = 'powershell.exe'
command = '$env:PATH'
} else {
// Try to resolve the current user's default shell
let userShell = process.env.SHELL
if (!userShell) {
if (fs.existsSync('/bin/zsh')) {
userShell = '/bin/zsh'
} else if (fs.existsSync('/bin/bash')) {
userShell = '/bin/bash'
} else if (fs.existsSync('/bin/fish')) {
userShell = '/bin/fish'
} else {
userShell = '/bin/sh'
}
}
shell = userShell
// Build the command according to the shell
if (userShell.includes('zsh')) {
command =
'source /etc/zshenv 2>/dev/null || true; source ~/.zshenv 2>/dev/null || true; source /etc/zprofile 2>/dev/null || true; source ~/.zprofile 2>/dev/null || true; source /etc/zshrc 2>/dev/null || true; source ~/.zshrc 2>/dev/null || true; source /etc/zlogin 2>/dev/null || true; source ~/.zlogin 2>/dev/null || true; echo $PATH'
} else if (userShell.includes('bash')) {
command =
'source /etc/profile 2>/dev/null || true; source ~/.bash_profile 2>/dev/null || true; source ~/.bash_login 2>/dev/null || true; source ~/.profile 2>/dev/null || true; source ~/.bashrc 2>/dev/null || true; echo $PATH'
} else if (userShell.includes('fish')) {
command =
'source /etc/fish/config.fish 2>/dev/null || true; source ~/.config/fish/config.fish 2>/dev/null || true; source ~/.config/fish/config.local.fish 2>/dev/null || true; echo $PATH'
} else {
// Fall back to zsh by default
shell = '/bin/zsh'
command =
'source /etc/zshenv 2>/dev/null || true; source ~/.zshenv 2>/dev/null || true; source /etc/zprofile 2>/dev/null || true; source ~/.zprofile 2>/dev/null || true; source /etc/zshrc 2>/dev/null || true; source ~/.zshrc 2>/dev/null || true; source /etc/zlogin 2>/dev/null || true; source ~/.zlogin 2>/dev/null || true; echo $PATH'
}
}
console.log(`Using shell: ${shell} with command: ${command}`)
const child = require('child_process').spawn(shell, ['-c', command], {
env: { ...process.env },
cwd: app.getPath('home')
})
let path = ''
child.stdout.on('data', (data: Buffer) => {
path += data.toString()
})
child.stderr.on('data', (data: Buffer) => {
console.error('Error getting PATH:', data.toString())
})
child.on('close', (code: number) => {
if (code === 0) {
const trimmedPath = path.trim()
resolve(trimmedPath)
} else {
reject(new Error(`Failed to get system PATH, exit code: ${code}`))
}
})
})
})
/**
* Get enhanced PATH including common tool locations
*/
private async getEnhancedPath(originalPath: string): Promise<string> {
let systemPath = ''
try {
systemPath = await this.getSystemPath()
} catch (error) {
Logger.error('[MCP] Failed to get system PATH:', error)
}
// Split the original PATH into an array by the platform separator
const pathSeparator = process.platform === 'win32' ? ';' : ':'
const existingPaths = new Set(
[...systemPath.split(pathSeparator), ...originalPath.split(pathSeparator)].filter(Boolean)
)
const homeDir = process.env.HOME || process.env.USERPROFILE || ''
// Define the extra paths to add
const newPaths: string[] = []
if (isMac) {
newPaths.push(
'/bin',
'/usr/bin',
'/usr/local/bin',
'/usr/local/sbin',
'/opt/homebrew/bin',
'/opt/homebrew/sbin',
'/usr/local/opt/node/bin',
`${homeDir}/.nvm/current/bin`,
`${homeDir}/.npm-global/bin`,
`${homeDir}/.yarn/bin`,
`${homeDir}/.cargo/bin`,
`${homeDir}/.cherrystudio/bin`,
'/opt/local/bin'
)
}
if (isLinux) {
newPaths.push(
'/bin',
'/usr/bin',
'/usr/local/bin',
`${homeDir}/.nvm/current/bin`,
`${homeDir}/.npm-global/bin`,
`${homeDir}/.yarn/bin`,
`${homeDir}/.cargo/bin`,
`${homeDir}/.cherrystudio/bin`,
'/snap/bin'
)
}
if (isWin) {
newPaths.push(
`${process.env.APPDATA}\\npm`,
`${homeDir}\\AppData\\Local\\Yarn\\bin`,
`${homeDir}\\.cargo\\bin`,
`${homeDir}\\.cherrystudio\\bin`
)
}
// Only add paths that are not already present
newPaths.forEach((path) => {
if (path && !existingPaths.has(path)) {
existingPaths.add(path)
}
})
// Join back into a single string
return Array.from(existingPaths).join(pathSeparator)
}
}
export default new McpService()
const mcpService = new McpService()
export default mcpService

View File

@ -0,0 +1,365 @@
import { auth, AuthResult, OAuthClientProvider, UnauthorizedError } from '@modelcontextprotocol/sdk/client/auth.js'
import { Transport } from '@modelcontextprotocol/sdk/shared/transport.js'
import { JSONRPCMessage, JSONRPCMessageSchema } from '@modelcontextprotocol/sdk/types.js'
export class StreamableHTTPError extends Error {
constructor(
public readonly code: number | undefined,
message: string | undefined,
public readonly event: ErrorEvent
) {
super(`Streamable HTTP error: ${message}`)
}
}
/**
* Configuration options for the `StreamableHTTPClientTransport`.
*/
export type StreamableHTTPClientTransportOptions = {
/**
* An OAuth client provider to use for authentication.
*
* When an `authProvider` is specified and the connection is started:
* 1. The connection is attempted with any existing access token from the `authProvider`.
* 2. If the access token has expired, the `authProvider` is used to refresh the token.
* 3. If token refresh fails or no access token exists, and auth is required, `OAuthClientProvider.redirectToAuthorization` is called, and an `UnauthorizedError` will be thrown from `connect`/`start`.
*
* After the user has finished authorizing via their user agent, and is redirected back to the MCP client application, call `StreamableHTTPClientTransport.finishAuth` with the authorization code before retrying the connection.
*
* If an `authProvider` is not provided, and auth is required, an `UnauthorizedError` will be thrown.
*
* `UnauthorizedError` might also be thrown when sending any message over the transport, indicating that the session has expired, and needs to be re-authed and reconnected.
*/
authProvider?: OAuthClientProvider
/**
* Customizes HTTP requests to the server.
*/
requestInit?: RequestInit
}
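// Illustrative usage sketch (not part of this change; the URL and client metadata are
// placeholders, and `Client` is assumed to be imported from the MCP SDK): wiring this
// transport to an MCP Client. The OAuth flow described above only applies when an
// `authProvider` is supplied and the server responds with 401.
//
// const transport = new StreamableHTTPClientTransport(new URL('https://example.com/mcp'), {
//   requestInit: { headers: { 'X-Example': 'demo' } }
// })
// const client = new Client({ name: 'Example Client', version: '1.0.0' }, { capabilities: {} })
// await client.connect(transport) // throws UnauthorizedError if auth is required but not configured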
/**
* Client transport for Streamable HTTP: this implements the MCP Streamable HTTP transport specification.
* It will connect to a server using HTTP POST for sending messages and HTTP GET with Server-Sent Events
* for receiving messages.
*/
export class StreamableHTTPClientTransport implements Transport {
private _activeStreams: Map<string, ReadableStreamDefaultReader<Uint8Array>> = new Map()
private _abortController?: AbortController
private _url: URL
private _requestInit?: RequestInit
private _authProvider?: OAuthClientProvider
private _sessionId?: string
private _lastEventId?: string
onclose?: () => void
onerror?: (error: Error) => void
onmessage?: (message: JSONRPCMessage) => void
constructor(url: URL, opts?: StreamableHTTPClientTransportOptions) {
this._url = url
this._requestInit = opts?.requestInit
this._authProvider = opts?.authProvider
}
private async _authThenStart(): Promise<void> {
if (!this._authProvider) {
throw new UnauthorizedError('No auth provider')
}
let result: AuthResult
try {
result = await auth(this._authProvider, { serverUrl: this._url })
} catch (error) {
this.onerror?.(error as Error)
throw error
}
if (result !== 'AUTHORIZED') {
throw new UnauthorizedError()
}
return await this._startOrAuth()
}
private async _commonHeaders(): Promise<HeadersInit> {
const headers: HeadersInit = {}
if (this._authProvider) {
const tokens = await this._authProvider.tokens()
if (tokens) {
headers['Authorization'] = `Bearer ${tokens.access_token}`
}
}
if (this._sessionId) {
headers['mcp-session-id'] = this._sessionId
}
return headers
}
private async _startOrAuth(): Promise<void> {
try {
// Try to open an initial SSE stream with GET to listen for server messages
// This is optional according to the spec - server may not support it
const commonHeaders = await this._commonHeaders()
const headers = new Headers(commonHeaders)
headers.set('Accept', 'text/event-stream')
// Include Last-Event-ID header for resumable streams
if (this._lastEventId) {
headers.set('last-event-id', this._lastEventId)
}
const response = await fetch(this._url, {
method: 'GET',
headers,
signal: this._abortController?.signal
})
if (response.status === 405) {
// Server doesn't support GET for SSE, which is allowed by the spec
// We'll rely on SSE responses to POST requests for communication
return
}
if (!response.ok) {
if (response.status === 401 && this._authProvider) {
// Need to authenticate
return await this._authThenStart()
}
const error = new Error(`Failed to open SSE stream: ${response.status} ${response.statusText}`)
this.onerror?.(error)
throw error
}
// Successful connection, handle the SSE stream as a standalone listener
const streamId = `initial-${Date.now()}`
this._handleSseStream(response.body, streamId)
} catch (error) {
this.onerror?.(error as Error)
throw error
}
}
async start() {
if (this._activeStreams.size > 0) {
throw new Error(
'StreamableHTTPClientTransport already started! If using Client class, note that connect() calls start() automatically.'
)
}
this._abortController = new AbortController()
return await this._startOrAuth()
}
/**
* Call this method after the user has finished authorizing via their user agent and is redirected back to the MCP client application. This will exchange the authorization code for an access token, enabling the next connection attempt to successfully auth.
*/
async finishAuth(authorizationCode: string): Promise<void> {
if (!this._authProvider) {
throw new UnauthorizedError('No auth provider')
}
const result = await auth(this._authProvider, { serverUrl: this._url, authorizationCode })
if (result !== 'AUTHORIZED') {
throw new UnauthorizedError('Failed to authorize')
}
}
async close(): Promise<void> {
// Close all active streams
for (const reader of this._activeStreams.values()) {
try {
reader.cancel()
} catch (error) {
this.onerror?.(error as Error)
}
}
this._activeStreams.clear()
// Abort any pending requests
this._abortController?.abort()
// If we have a session ID, send a DELETE request to explicitly terminate the session
if (this._sessionId) {
try {
const commonHeaders = await this._commonHeaders()
const response = await fetch(this._url, {
method: 'DELETE',
headers: commonHeaders,
signal: this._abortController?.signal
})
if (!response.ok) {
// Server might respond with 405 if it doesn't support explicit session termination
// We don't throw an error in that case
if (response.status !== 405) {
const text = await response.text().catch(() => null)
throw new Error(`Error terminating session (HTTP ${response.status}): ${text}`)
}
}
} catch (error) {
// We still want to invoke onclose even if the session termination fails
this.onerror?.(error as Error)
}
}
this.onclose?.()
}
async send(message: JSONRPCMessage | JSONRPCMessage[]): Promise<void> {
try {
const commonHeaders = await this._commonHeaders()
const headers = new Headers({ ...commonHeaders, ...this._requestInit?.headers })
headers.set('content-type', 'application/json')
headers.set('accept', 'application/json, text/event-stream')
const init = {
...this._requestInit,
method: 'POST',
headers,
body: JSON.stringify(message),
signal: this._abortController?.signal
}
const response = await fetch(this._url, init)
// Handle session ID received during initialization
const sessionId = response.headers.get('mcp-session-id')
if (sessionId) {
this._sessionId = sessionId
}
if (!response.ok) {
if (response.status === 401 && this._authProvider) {
const result = await auth(this._authProvider, { serverUrl: this._url })
if (result !== 'AUTHORIZED') {
throw new UnauthorizedError()
}
// Purposely _not_ awaited, so we don't call onerror twice
return this.send(message)
}
const text = await response.text().catch(() => null)
throw new Error(`Error POSTing to endpoint (HTTP ${response.status}): ${text}`)
}
// If the response is 202 Accepted, there's no body to process
if (response.status === 202) {
return
}
// Get original message(s) for detecting request IDs
const messages = Array.isArray(message) ? message : [message]
// Extract IDs from request messages for tracking responses
const requestIds = messages
.filter((msg) => 'method' in msg && 'id' in msg)
.map((msg) => ('id' in msg ? msg.id : undefined))
.filter((id) => id !== undefined)
// If we have request IDs and an SSE response, create a unique stream ID
const hasRequests = requestIds.length > 0
// Check the response type
const contentType = response.headers.get('content-type')
if (hasRequests) {
if (contentType?.includes('text/event-stream')) {
// For streaming responses, create a unique stream ID based on request IDs
const streamId = `req-${requestIds.join('-')}-${Date.now()}`
this._handleSseStream(response.body, streamId)
} else if (contentType?.includes('application/json')) {
// For non-streaming servers, we might get direct JSON responses
const data = await response.json()
const responseMessages = Array.isArray(data)
? data.map((msg) => JSONRPCMessageSchema.parse(msg))
: [JSONRPCMessageSchema.parse(data)]
for (const msg of responseMessages) {
this.onmessage?.(msg)
}
}
}
} catch (error) {
this.onerror?.(error as Error)
throw error
}
}
private _handleSseStream(stream: ReadableStream<Uint8Array> | null, streamId: string): void {
if (!stream) {
return
}
// Set up stream handling for server-sent events
const reader = stream.getReader()
this._activeStreams.set(streamId, reader)
const decoder = new TextDecoder()
let buffer = ''
const processStream = async () => {
try {
while (true) {
const { done, value } = await reader.read()
if (done) {
// Stream closed by server
this._activeStreams.delete(streamId)
break
}
buffer += decoder.decode(value, { stream: true })
// Process SSE messages in the buffer
const events = buffer.split('\n\n')
buffer = events.pop() || ''
for (const event of events) {
const lines = event.split('\n')
let id: string | undefined
let eventType: string | undefined
let data: string | undefined
// Parse SSE message according to the format
for (const line of lines) {
if (line.startsWith('id:')) {
id = line.slice(3).trim()
} else if (line.startsWith('event:')) {
eventType = line.slice(6).trim()
} else if (line.startsWith('data:')) {
data = line.slice(5).trim()
}
}
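// Example of a single parsed event (illustrative values only):
//   id: 7
//   event: message
//   data: {"jsonrpc":"2.0","id":1,"result":{"tools":[]}}
// A blank line (\n\n) terminates each event, which is why the buffer is split on '\n\n' above.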
// Update last event ID if provided by server
// As per spec: the ID MUST be globally unique across all streams within that session
if (id) {
this._lastEventId = id
}
// Handle message event
if (data) {
// Default event type is 'message' per SSE spec if not specified
if (!eventType || eventType === 'message') {
try {
const message = JSONRPCMessageSchema.parse(JSON.parse(data))
this.onmessage?.(message)
} catch (error) {
this.onerror?.(error as Error)
}
}
}
}
}
} catch (error) {
this._activeStreams.delete(streamId)
this.onerror?.(error as Error)
}
}
processStream()
}
}

View File

@ -5,6 +5,8 @@ import { XMLParser } from 'fast-xml-parser'
import { isNil, partial } from 'lodash'
import { type FileStat } from 'webdav'
import { createOAuthUrl, decryptSecret } from '../integration/nutstore/sso/lib/index.mjs'
interface OAuthResponse {
username: string
userid: string
@ -30,18 +32,18 @@ interface WebDAVResponse {
}
export async function getNutstoreSSOUrl() {
const { createOAuthUrl } = await import('../integration/nutstore/sso/lib')
const url = createOAuthUrl({
const url = await createOAuthUrl({
app: 'cherrystudio'
})
return url
}
export async function decryptToken(token: string) {
const { decrypt } = await import('../integration/nutstore/sso/lib')
try {
const decrypted = decrypt('cherrystudio', token)
const decrypted = await decryptSecret({
app: 'cherrystudio',
s: token
})
return JSON.parse(decrypted) as OAuthResponse
} catch (error) {
console.error('Decryption failed:', error)
@ -112,10 +114,10 @@ function convertToFileStat(serverBase: string, item: WebDAVResponse['multistatus
const props = item.propstat.prop
const isDir = !isNil(props.resourcetype?.collection)
const href = decodeURIComponent(item.href)
const filename = serverBase === '/' ? href : path.join('/', href.replace(serverBase, ''))
const filename = serverBase === '/' ? href : path.posix.join('/', href.replace(serverBase, ''))
return {
filename,
filename: filename.endsWith('/') ? filename.slice(0, -1) : filename,
basename: path.basename(filename),
lastmod: props.getlastmodified || '',
size: props.getcontentlength ? parseInt(props.getcontentlength, 10) : 0,

View File

@ -1,5 +1,6 @@
import { ProxyConfig as _ProxyConfig, session } from 'electron'
import { socksDispatcher } from 'fetch-socks'
import { getSystemProxy } from 'os-proxy-config'
import { ProxyAgent as GeneralProxyAgent } from 'proxy-agent'
import { ProxyAgent, setGlobalDispatcher } from 'undici'
@ -70,15 +71,14 @@ export class ProxyManager {
private async setSystemProxy(): Promise<void> {
try {
await this.setSessionsProxy({ mode: 'system' })
const proxyString = await session.defaultSession.resolveProxy('https://dummy.com')
const [protocol, address] = proxyString.split(';')[0].split(' ')
const url = protocol === 'PROXY' ? `http://${address}` : null
if (url && url !== this.config.url) {
this.config.url = url.toLowerCase()
this.setEnvironment(this.config.url)
this.proxyAgent = new GeneralProxyAgent()
const currentProxy = await getSystemProxy()
if (!currentProxy || currentProxy.proxyUrl === this.config.url) {
return
}
await this.setSessionsProxy({ mode: 'system' })
this.config.url = currentProxy.proxyUrl.toLowerCase()
this.setEnvironment(this.config.url)
this.proxyAgent = new GeneralProxyAgent()
} catch (error) {
console.error('Failed to set system proxy:', error)
throw error

View File

@ -1,3 +1,4 @@
import { IpcChannel } from '@shared/IpcChannel'
import { ipcMain } from 'electron'
import { EventEmitter } from 'events'
@ -10,6 +11,8 @@ export class ReduxService extends EventEmitter {
private stateCache: any = {}
private isReady = false
private readonly STATUS_CHANGE_EVENT = 'statusChange'
constructor() {
super()
this.setupIpcHandlers()
@ -17,15 +20,15 @@ export class ReduxService extends EventEmitter {
private setupIpcHandlers() {
// Listen for the store-ready event
ipcMain.handle('redux-store-ready', () => {
ipcMain.handle(IpcChannel.ReduxStoreReady, () => {
this.isReady = true
this.emit('ready')
})
// Listen for store state changes
ipcMain.on('redux-state-change', (_, newState) => {
ipcMain.on(IpcChannel.ReduxStateChange, (_, newState) => {
this.stateCache = newState
this.emit('stateChange', newState)
this.emit(this.STATUS_CHANGE_EVENT, newState)
})
}
@ -122,19 +125,23 @@ export class ReduxService extends EventEmitter {
await this.waitForStoreReady(mainWindow.webContents)
// Set up the subscription in the renderer process
await mainWindow.webContents.executeJavaScript(`
await mainWindow.webContents.executeJavaScript(
`
if (!window._storeSubscriptions) {
window._storeSubscriptions = new Set();
// Set up a global state-change listener
const unsubscribe = window.store.subscribe(() => {
const state = window.store.getState();
window.electron.ipcRenderer.send('redux-state-change', state);
window.electron.ipcRenderer.send('` +
IpcChannel.ReduxStateChange +
`', state);
});
window._storeSubscriptions.add(unsubscribe);
}
`)
`
)
// Handle the callback in the main process
const handler = async () => {
@ -146,9 +153,9 @@ export class ReduxService extends EventEmitter {
}
}
this.on('stateChange', handler)
this.on(this.STATUS_CHANGE_EVENT, handler)
return () => {
this.off('stateChange', handler)
this.off(this.STATUS_CHANGE_EVENT, handler)
}
}
@ -180,41 +187,41 @@ export class ReduxService extends EventEmitter {
export const reduxService = new ReduxService()
/** example
async function example() {
try {
// Read state
const settings = await reduxService.select('state.settings')
console.log('settings', settings)
async function example() {
try {
// Read state
const settings = await reduxService.select('state.settings')
console.log('settings', settings)
// Dispatch an action
await reduxService.dispatch({
type: 'settings/updateApiKey',
payload: 'new-api-key'
})
// Dispatch an action
await reduxService.dispatch({
type: 'settings/updateApiKey',
payload: 'new-api-key'
})
// Subscribe to state changes
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
console.log('API key changed:', newValue)
})
// Subscribe to state changes
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
console.log('API key changed:', newValue)
})
// Dispatch actions in batch
await reduxService.batch([
{ type: 'action1', payload: 'data1' },
{ type: 'action2', payload: 'data2' }
])
// Dispatch actions in batch
await reduxService.batch([
{ type: 'action1', payload: 'data1' },
{ type: 'action2', payload: 'data2' }
])
// The sync method may not return the latest data, but it responds faster
const apiKey = reduxService.selectSync('state.settings.apiKey')
console.log('apiKey', apiKey)
// The sync method may not return the latest data, but it responds faster
const apiKey = reduxService.selectSync('state.settings.apiKey')
console.log('apiKey', apiKey)
// The async path is guaranteed to return the latest data
const apiKey1 = await reduxService.select('state.settings.apiKey')
console.log('apiKey1', apiKey1)
// The async path is guaranteed to return the latest data
const apiKey1 = await reduxService.select('state.settings.apiKey')
console.log('apiKey1', apiKey1)
// Unsubscribe
unsubscribe()
} catch (error) {
console.error('Error:', error)
}
}
*/
// Unsubscribe
unsubscribe()
} catch (error) {
console.error('Error:', error)
}
}
*/

View File

@ -0,0 +1,82 @@
import { is } from '@electron-toolkit/utils'
import { BrowserWindow } from 'electron'
export class SearchService {
private static instance: SearchService | null = null
private searchWindows: Record<string, BrowserWindow> = {}
public static getInstance(): SearchService {
if (!SearchService.instance) {
SearchService.instance = new SearchService()
}
return SearchService.instance
}
constructor() {
// Initialize the service
}
private async createNewSearchWindow(uid: string): Promise<BrowserWindow> {
const newWindow = new BrowserWindow({
width: 800,
height: 600,
show: false,
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
devTools: is.dev
}
})
newWindow.webContents.session.webRequest.onBeforeSendHeaders({ urls: ['*://*/*'] }, (details, callback) => {
const headers = {
...details.requestHeaders,
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
}
callback({ requestHeaders: headers })
})
this.searchWindows[uid] = newWindow
newWindow.on('closed', () => {
delete this.searchWindows[uid]
})
return newWindow
}
public async openSearchWindow(uid: string): Promise<void> {
await this.createNewSearchWindow(uid)
}
public async closeSearchWindow(uid: string): Promise<void> {
const window = this.searchWindows[uid]
if (window) {
window.close()
delete this.searchWindows[uid]
}
}
public async openUrlInSearchWindow(uid: string, url: string): Promise<any> {
let window = this.searchWindows[uid]
if (window) {
await window.loadURL(url)
} else {
window = await this.createNewSearchWindow(uid)
await window.loadURL(url)
}
// Get the page content after loading the URL
// Wait for the page to fully load before getting the content
await new Promise<void>((resolve) => {
const loadTimeout = setTimeout(() => resolve(), 10000) // 10 second timeout
window.webContents.once('did-finish-load', () => {
clearTimeout(loadTimeout)
// Small delay to ensure JavaScript has executed
setTimeout(resolve, 500)
})
})
// Get the page content after ensuring it's fully loaded
const content = await window.webContents.executeJavaScript('document.documentElement.outerHTML')
return content
}
}
export const searchService = SearchService.getInstance()

View File

@ -23,17 +23,8 @@ function getShortcutHandler(shortcut: Shortcut) {
configManager.setZoomFactor(1)
}
case 'show_app':
return (window: BrowserWindow) => {
if (window.isVisible()) {
if (window.isFocused()) {
window.hide()
} else {
window.focus()
}
} else {
window.show()
window.focus()
}
return () => {
windowService.toggleMainWindow()
}
case 'mini_window':
return () => {

View File

@ -26,6 +26,7 @@ export default class WebDav {
this.putFileContents = this.putFileContents.bind(this)
this.getFileContents = this.getFileContents.bind(this)
this.createDirectory = this.createDirectory.bind(this)
this.deleteFile = this.deleteFile.bind(this)
}
public putFileContents = async (
@ -98,4 +99,19 @@ export default class WebDav {
throw error
}
}
public deleteFile = async (filename: string) => {
if (!this.instance) {
throw new Error('WebDAV client not initialized')
}
const remoteFilePath = `${this.webdavPath}/${filename}`
try {
return await this.instance.deleteFile(remoteFilePath)
} catch (error) {
Logger.error('[WebDAV] Error deleting file on WebDAV:', error)
throw error
}
}
}

View File

@ -1,6 +1,7 @@
import { is } from '@electron-toolkit/utils'
import { isDev, isLinux, isMac, isWin } from '@main/constant'
import { getFilesDir } from '@main/utils/file'
import { IpcChannel } from '@shared/IpcChannel'
import { app, BrowserWindow, ipcMain, Menu, MenuItem, shell } from 'electron'
import Logger from 'electron-log'
import windowStateKeeper from 'electron-window-state'
@ -15,7 +16,10 @@ export class WindowService {
private static instance: WindowService | null = null
private mainWindow: BrowserWindow | null = null
private miniWindow: BrowserWindow | null = null
private wasFullScreen: boolean = false
private isPinnedMiniWindow: boolean = false
//hacky-fix: store whether mainWindow was focused before miniWindow shows,
//so the focus state can be restored when miniWindow hides
private wasMainWindowFocused: boolean = false
private selectionMenuWindow: BrowserWindow | null = null
private lastSelectedText: string = ''
private contextMenu: Menu | null = null
@ -30,12 +34,14 @@ export class WindowService {
public createMainWindow(): BrowserWindow {
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
this.mainWindow.show()
this.mainWindow.focus()
return this.mainWindow
}
const mainWindowState = windowStateKeeper({
defaultWidth: 1080,
defaultHeight: 670
defaultHeight: 670,
fullScreen: false
})
const theme = configManager.getTheme()
@ -47,7 +53,7 @@ export class WindowService {
height: mainWindowState.height,
minWidth: 1080,
minHeight: 600,
show: false, // do not show initially
show: false,
autoHideMenuBar: true,
transparent: isMac,
vibrancy: 'sidebar',
@ -56,7 +62,7 @@ export class WindowService {
titleBarOverlay: theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight,
backgroundColor: isMac ? undefined : theme === 'dark' ? '#181818' : '#FFFFFF',
trafficLightPosition: { x: 8, y: 12 },
...(process.platform === 'linux' ? { icon } : {}),
...(isLinux ? { icon } : {}),
webPreferences: {
preload: join(__dirname, '../preload/index.js'),
sandbox: false,
@ -68,39 +74,15 @@ export class WindowService {
this.setupMainWindow(this.mainWindow, mainWindowState)
//preload miniWindow to resolve a series of miniWindow issues on Mac
const enableQuickAssistant = configManager.getEnableQuickAssistant()
if (enableQuickAssistant && !this.miniWindow) {
this.miniWindow = this.createMiniWindow(true)
}
return this.mainWindow
}
public createMinappWindow({
url,
parent,
windowOptions
}: {
url: string
parent?: BrowserWindow
windowOptions?: Electron.BrowserWindowConstructorOptions
}): BrowserWindow {
const width = windowOptions?.width || 1000
const height = windowOptions?.height || 680
const minappWindow = new BrowserWindow({
width,
height,
autoHideMenuBar: true,
title: 'Cherry Studio',
...windowOptions,
parent,
webPreferences: {
preload: join(__dirname, '../preload/minapp.js'),
sandbox: false,
contextIsolation: false
}
})
minappWindow.loadURL(url)
return minappWindow
}
private setupMainWindow(mainWindow: BrowserWindow, mainWindowState: any) {
mainWindowState.manage(mainWindow)
@ -148,21 +130,40 @@ export class WindowService {
// show the window only when 'launch to tray' is not set
const isLaunchToTray = configManager.getLaunchToTray()
if (!isLaunchToTray) {
//[mac]hacky-fix: setting visibleOnFullScreen: true on miniWindow would make the dock icon disappear
app.dock?.show()
mainWindow.show()
}
})
// Handle fullscreen-related events
mainWindow.on('enter-full-screen', () => {
this.wasFullScreen = true
mainWindow.webContents.send('fullscreen-status-changed', true)
mainWindow.webContents.send(IpcChannel.FullscreenStatusChanged, true)
})
mainWindow.on('leave-full-screen', () => {
this.wasFullScreen = false
mainWindow.webContents.send('fullscreen-status-changed', false)
mainWindow.webContents.send(IpcChannel.FullscreenStatusChanged, false)
})
// set the zoom factor again when the window is about to resize
//
// this is a workaround for a known bug where
// the zoom factor is reset to its cached value when the window is resized after routing to another page
// see: https://github.com/electron/electron/issues/10572
//
mainWindow.on('will-resize', () => {
mainWindow.webContents.setZoomFactor(configManager.getZoomFactor())
})
// ARCH: `will-resize` is only emitted on Windows & macOS,
// but Linux has the same problem, so use a `resize` listener there instead,
// even though `resize` makes the UI flicker
if (isLinux) {
mainWindow.on('resize', () => {
mainWindow.webContents.setZoomFactor(configManager.getZoomFactor())
})
}
// Add support for exiting fullscreen with the Escape key
mainWindow.webContents.on('before-input-event', (event, input) => {
// Exit fullscreen when Escape is pressed while the window is in fullscreen
@ -242,6 +243,7 @@ export class WindowService {
private loadMainWindowContent(mainWindow: BrowserWindow) {
if (is.dev && process.env['ELECTRON_RENDERER_URL']) {
mainWindow.loadURL(process.env['ELECTRON_RENDERER_URL'])
// mainWindow.webContents.openDevTools()
} else {
mainWindow.loadFile(join(__dirname, '../renderer/index.html'))
}
@ -271,23 +273,19 @@ export class WindowService {
}
}
//below the logic above is the case of "tray enabled + minimize to tray on close"
// On Windows or Linux, quit the app if the window is in fullscreen
if (this.wasFullScreen) {
if (isWin || isLinux) {
return app.quit()
} else {
event.preventDefault()
mainWindow.setFullScreen(false)
return
}
}
/**
* Cases that reach this point:
* win/linux: tray enabled + minimize to tray on close
* mac: always ends up here regardless of settings
*/
event.preventDefault()
mainWindow.hide()
//for mac users, hide the dock icon when closing to tray
if (isMac && isTrayOnClose) {
app.dock?.hide() //for mac to hide to tray
app.dock?.hide()
}
})
@ -309,17 +307,148 @@ export class WindowService {
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
if (this.mainWindow.isMinimized()) {
return this.mainWindow.restore()
this.mainWindow.restore()
return
}
/**
* About setVisibleOnAllWorkspaces
*
* [macOS] Known Issue
* setVisibleOnAllWorkspaces true/false will NOT bring the window to the current desktop on Mac (works fine on Windows)
* AppleScript may be a solution, but it's not worth it
*
* [Linux] Known Issue
* setVisibleOnAllWorkspaces causes a "ghost popup" of the window on Linux (KDE Wayland),
* so we skip it on Linux
*/
if (!isLinux) {
this.mainWindow.setVisibleOnAllWorkspaces(true)
}
/**
* [macOS] After the window is closed while in fullscreen, fullscreen behaves strangely the next time the window is shown,
* so we need to set fullscreen to FALSE explicitly.
* Although other platforms don't have this issue, it's good practice to do so anyway.
*
* Check if window is visible to prevent interrupting fullscreen state when clicking dock icon
*/
if (this.mainWindow.isFullScreen() && !this.mainWindow.isVisible()) {
this.mainWindow.setFullScreen(false)
}
this.mainWindow.show()
this.mainWindow.focus()
if (!isLinux) {
this.mainWindow.setVisibleOnAllWorkspaces(false)
}
} else {
this.mainWindow = this.createMainWindow()
this.mainWindow.focus()
}
}
public toggleMainWindow() {
// should not toggle the main window while it is in fullscreen
// but if the main window was closed to tray while in fullscreen, we can still show it again
// (a macOS quirk: the window can be closed while in fullscreen, and that state is retained)
if (this.mainWindow?.isFullScreen() && this.mainWindow?.isVisible()) {
return
}
//for mac users, when the window is shown, show the dock icon as well (the dock may have been hidden at launch)
app.dock?.show()
if (this.mainWindow && !this.mainWindow.isDestroyed() && this.mainWindow.isVisible()) {
if (this.mainWindow.isFocused()) {
// if tray is enabled, hide the main window, else do nothing
if (configManager.getTray()) {
this.mainWindow.hide()
app.dock?.hide()
}
} else {
this.mainWindow.focus()
}
return
}
this.showMainWindow()
}
public createMiniWindow(isPreload: boolean = false): BrowserWindow {
this.miniWindow = new BrowserWindow({
width: 550,
height: 400,
minWidth: 350,
minHeight: 380,
maxWidth: 1024,
maxHeight: 768,
show: false,
autoHideMenuBar: true,
transparent: isMac,
vibrancy: 'under-window',
visualEffectState: 'followWindow',
center: true,
frame: false,
alwaysOnTop: true,
resizable: true,
useContentSize: true,
...(isMac ? { type: 'panel' } : {}),
skipTaskbar: true,
minimizable: false,
maximizable: false,
fullscreenable: false,
webPreferences: {
preload: join(__dirname, '../preload/index.js'),
sandbox: false,
webSecurity: false,
webviewTag: true
}
})
    // the miniWindow should show on the current desktop
this.miniWindow?.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true })
    // keep the miniWindow on top of fullscreen apps by setting its level
    // [mac] a level higher than 'floating' would cover the pinyin input method
this.miniWindow.setAlwaysOnTop(true, 'floating')
this.miniWindow.on('ready-to-show', () => {
if (isPreload) {
return
}
this.wasMainWindowFocused = this.mainWindow?.isFocused() || false
this.miniWindow?.center()
this.miniWindow?.show()
})
this.miniWindow.on('blur', () => {
if (!this.isPinnedMiniWindow) {
this.hideMiniWindow()
}
})
this.miniWindow.on('closed', () => {
this.miniWindow = null
})
this.miniWindow.on('hide', () => {
this.miniWindow?.webContents.send(IpcChannel.HideMiniWindow)
})
this.miniWindow.on('show', () => {
this.miniWindow?.webContents.send(IpcChannel.ShowMiniWindow)
})
ipcMain.on(IpcChannel.MiniWindowReload, () => {
this.miniWindow?.reload()
})
if (is.dev && process.env['ELECTRON_RENDERER_URL']) {
this.miniWindow.loadURL(process.env['ELECTRON_RENDERER_URL'] + '#/mini')
} else {
this.miniWindow.loadFile(join(__dirname, '../renderer/index.html'), {
hash: '#/mini'
})
}
return this.miniWindow
}
public showMiniWindow() {
@ -334,66 +463,32 @@ export class WindowService {
}
if (this.miniWindow && !this.miniWindow.isDestroyed()) {
this.wasMainWindowFocused = this.mainWindow?.isFocused() || false
if (this.miniWindow.isMinimized()) {
this.miniWindow.restore()
}
this.miniWindow.show()
this.miniWindow.center()
this.miniWindow.focus()
return
}
this.miniWindow = new BrowserWindow({
width: 500,
height: 520,
show: true,
autoHideMenuBar: true,
transparent: isMac,
vibrancy: 'under-window',
visualEffectState: 'followWindow',
center: true,
frame: false,
alwaysOnTop: true,
resizable: false,
useContentSize: true,
webPreferences: {
preload: join(__dirname, '../preload/index.js'),
sandbox: false,
webSecurity: false,
webviewTag: true
}
})
this.miniWindow.on('blur', () => {
this.miniWindow?.hide()
})
this.miniWindow.on('closed', () => {
this.miniWindow = null
})
this.miniWindow.on('hide', () => {
this.miniWindow?.webContents.send('hide-mini-window')
})
this.miniWindow.on('show', () => {
this.miniWindow?.webContents.send('show-mini-window')
})
ipcMain.on('miniwindow-reload', () => {
this.miniWindow?.reload()
})
if (is.dev && process.env['ELECTRON_RENDERER_URL']) {
this.miniWindow.loadURL(process.env['ELECTRON_RENDERER_URL'] + '#/mini')
} else {
this.miniWindow.loadFile(join(__dirname, '../renderer/index.html'), {
hash: '#/mini'
})
}
this.miniWindow = this.createMiniWindow()
}
public hideMiniWindow() {
    // hacky fix: [mac/win] the previously focused window (of another app) should regain focus after the miniWindow hides
if (isWin) {
this.miniWindow?.minimize()
this.miniWindow?.hide()
return
} else if (isMac) {
this.miniWindow?.hide()
if (!this.wasMainWindowFocused) {
app.hide()
}
return
}
this.miniWindow?.hide()
}
@ -402,11 +497,16 @@ export class WindowService {
}
public toggleMiniWindow() {
if (this.miniWindow) {
this.miniWindow.isVisible() ? this.miniWindow.hide() : this.miniWindow.show()
} else {
this.showMiniWindow()
if (this.miniWindow && !this.miniWindow.isDestroyed() && this.miniWindow.isVisible()) {
this.hideMiniWindow()
return
}
this.showMiniWindow()
}
public setPinMiniWindow(isPinned) {
this.isPinnedMiniWindow = isPinned
}
public showSelectionMenu(bounds: { x: number; y: number }) {
@ -442,7 +542,7 @@ export class WindowService {
    // Hide the window when clicking anywhere else
this.selectionMenuWindow.on('blur', () => {
this.selectionMenuWindow?.hide()
this.miniWindow?.webContents.send('selection-action', {
this.miniWindow?.webContents.send(IpcChannel.SelectionAction, {
action: 'home',
selectedText: this.lastSelectedText
})
@ -460,12 +560,12 @@ export class WindowService {
private setupSelectionMenuEvents() {
if (!this.selectionMenuWindow) return
ipcMain.removeHandler('selection-menu:action')
ipcMain.handle('selection-menu:action', (_, action) => {
ipcMain.removeHandler(IpcChannel.SelectionMenu_Action)
ipcMain.handle(IpcChannel.SelectionMenu_Action, (_, action) => {
this.selectionMenuWindow?.hide()
this.showMiniWindow()
setTimeout(() => {
this.miniWindow?.webContents.send('selection-action', {
this.miniWindow?.webContents.send(IpcChannel.SelectionAction, {
action,
selectedText: this.lastSelectedText
})


@ -0,0 +1,76 @@
import Logger from 'electron-log'
import EventEmitter from 'events'
import http from 'http'
import { URL } from 'url'
import { OAuthCallbackServerOptions } from './types'
export class CallBackServer {
private server: Promise<http.Server>
private events: EventEmitter
constructor(options: OAuthCallbackServerOptions) {
const { port, path, events } = options
this.events = events
this.server = this.initialize(port, path)
}
initialize(port: number, path: string): Promise<http.Server> {
const server = http.createServer((req, res) => {
// Only handle requests to the callback path
if (req.url?.startsWith(path)) {
try {
// Parse the URL to extract the authorization code
const url = new URL(req.url, `http://localhost:${port}`)
const code = url.searchParams.get('code')
if (code) {
// Emit the code event
this.events.emit('auth-code-received', code)
}
} catch (error) {
Logger.error('Error processing OAuth callback:', error)
res.writeHead(500, { 'Content-Type': 'text/plain' })
res.end('Internal Server Error')
}
} else {
// Not a callback request
res.writeHead(404, { 'Content-Type': 'text/plain' })
res.end('Not Found')
}
})
// Handle server errors
server.on('error', (error) => {
Logger.error('OAuth callback server error:', error)
})
const runningServer = new Promise<http.Server>((resolve, reject) => {
server.listen(port, () => {
Logger.info(`OAuth callback server listening on port ${port}`)
resolve(server)
})
server.on('error', (error) => {
reject(error)
})
})
return runningServer
}
get getServer(): Promise<http.Server> {
return this.server
}
async close() {
const server = await this.server
server.close()
}
async waitForAuthCode(): Promise<string> {
return new Promise((resolve) => {
this.events.once('auth-code-received', (code) => {
resolve(code)
})
})
}
}
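
A minimal usage sketch for the callback server above, assuming the OAuth flow redirects the browser to http://localhost:12346/oauth/callback?code=...; the module path, port, and path values are illustrative and not taken from this commit:

import EventEmitter from 'events'
import { CallBackServer } from './callback' // module path assumed

async function waitForOAuthCode(): Promise<string> {
  const events = new EventEmitter()
  // Port and path normally come from OAuthProviderOptions defaults (12346, '/oauth/callback')
  const callbackServer = new CallBackServer({ port: 12346, path: '/oauth/callback', events })
  try {
    // Resolves once the browser hits the callback URL with ?code=...
    return await callbackServer.waitForAuthCode()
  } finally {
    await callbackServer.close()
  }
}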


@ -0,0 +1,78 @@
import path from 'node:path'
import { getConfigDir } from '@main/utils/file'
import { OAuthClientProvider } from '@modelcontextprotocol/sdk/client/auth'
import { OAuthClientInformation, OAuthClientInformationFull, OAuthTokens } from '@modelcontextprotocol/sdk/shared/auth'
import Logger from 'electron-log'
import open from 'open'
import { JsonFileStorage } from './storage'
import { OAuthProviderOptions } from './types'
export class McpOAuthClientProvider implements OAuthClientProvider {
private storage: JsonFileStorage
public readonly config: Required<OAuthProviderOptions>
constructor(options: OAuthProviderOptions) {
const configDir = path.join(getConfigDir(), 'mcp', 'oauth')
this.config = {
serverUrlHash: options.serverUrlHash,
callbackPort: options.callbackPort || 12346,
callbackPath: options.callbackPath || '/oauth/callback',
configDir: options.configDir || configDir,
clientName: options.clientName || 'Cherry Studio',
clientUri: options.clientUri || 'https://github.com/CherryHQ/cherry-studio'
}
this.storage = new JsonFileStorage(this.config.serverUrlHash, this.config.configDir)
}
get redirectUrl(): string {
return `http://localhost:${this.config.callbackPort}${this.config.callbackPath}`
}
get clientMetadata() {
return {
redirect_uris: [this.redirectUrl],
token_endpoint_auth_method: 'none',
grant_types: ['authorization_code', 'refresh_token'],
response_types: ['code'],
client_name: this.config.clientName,
client_uri: this.config.clientUri
}
}
async clientInformation(): Promise<OAuthClientInformation | undefined> {
return this.storage.getClientInformation()
}
async saveClientInformation(info: OAuthClientInformationFull): Promise<void> {
await this.storage.saveClientInformation(info)
}
async tokens(): Promise<OAuthTokens | undefined> {
return this.storage.getTokens()
}
async saveTokens(tokens: OAuthTokens): Promise<void> {
await this.storage.saveTokens(tokens)
}
async redirectToAuthorization(authorizationUrl: URL): Promise<void> {
try {
// Open the browser to the authorization URL
await open(authorizationUrl.toString())
Logger.info('Browser opened automatically.')
} catch (error) {
Logger.error('Could not open browser automatically.')
throw error // Let caller handle the error
}
}
async saveCodeVerifier(codeVerifier: string): Promise<void> {
await this.storage.saveCodeVerifier(codeVerifier)
}
async codeVerifier(): Promise<string> {
return this.storage.getCodeVerifier()
}
}
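
A brief sketch of constructing and querying the provider above; hashing the server URL with SHA-256 to produce serverUrlHash is an assumption about the calling code, not something this file defines:

import crypto from 'node:crypto'
import { McpOAuthClientProvider } from './provider' // module path assumed

async function exampleProvider(serverUrl: string) {
  // serverUrlHash only needs to be a stable key per server; SHA-256 of the URL is one option
  const serverUrlHash = crypto.createHash('sha256').update(serverUrl).digest('hex')
  const provider = new McpOAuthClientProvider({ serverUrlHash })
  console.log('redirect URL:', provider.redirectUrl) // http://localhost:12346/oauth/callback by default
  const tokens = await provider.tokens()
  if (!tokens) {
    console.log('no cached tokens; an authorization flow would be required first')
  }
  return provider
}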


@ -0,0 +1,120 @@
import {
OAuthClientInformation,
OAuthClientInformationFull,
OAuthTokens
} from '@modelcontextprotocol/sdk/shared/auth.js'
import Logger from 'electron-log'
import fs from 'fs/promises'
import path from 'path'
import { IOAuthStorage, OAuthStorageData, OAuthStorageSchema } from './types'
export class JsonFileStorage implements IOAuthStorage {
private readonly filePath: string
private cache: OAuthStorageData | null = null
constructor(
readonly serverUrlHash: string,
configDir: string
) {
this.filePath = path.join(configDir, `${serverUrlHash}_oauth.json`)
}
private async readStorage(): Promise<OAuthStorageData> {
if (this.cache) {
return this.cache
}
try {
const data = await fs.readFile(this.filePath, 'utf-8')
const parsed = JSON.parse(data)
const validated = OAuthStorageSchema.parse(parsed)
this.cache = validated
return validated
} catch (error) {
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
// File doesn't exist, return initial state
const initial: OAuthStorageData = { lastUpdated: Date.now() }
await this.writeStorage(initial)
return initial
}
Logger.error('Error reading OAuth storage:', error)
throw new Error(`Failed to read OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
}
}
private async writeStorage(data: OAuthStorageData): Promise<void> {
try {
// Ensure directory exists
await fs.mkdir(path.dirname(this.filePath), { recursive: true })
// Update timestamp
data.lastUpdated = Date.now()
// Write file atomically
const tempPath = `${this.filePath}.tmp`
await fs.writeFile(tempPath, JSON.stringify(data, null, 2))
await fs.rename(tempPath, this.filePath)
// Update cache
this.cache = data
} catch (error) {
Logger.error('Error writing OAuth storage:', error)
throw new Error(`Failed to write OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
}
}
async getClientInformation(): Promise<OAuthClientInformation | undefined> {
const data = await this.readStorage()
return data.clientInfo
}
async saveClientInformation(info: OAuthClientInformationFull): Promise<void> {
const data = await this.readStorage()
await this.writeStorage({
...data,
clientInfo: info
})
}
async getTokens(): Promise<OAuthTokens | undefined> {
const data = await this.readStorage()
return data.tokens
}
async saveTokens(tokens: OAuthTokens): Promise<void> {
const data = await this.readStorage()
await this.writeStorage({
...data,
tokens
})
}
async getCodeVerifier(): Promise<string> {
const data = await this.readStorage()
if (!data.codeVerifier) {
throw new Error('No code verifier saved for session')
}
return data.codeVerifier
}
async saveCodeVerifier(codeVerifier: string): Promise<void> {
const data = await this.readStorage()
await this.writeStorage({
...data,
codeVerifier
})
}
async clear(): Promise<void> {
try {
await fs.unlink(this.filePath)
this.cache = null
} catch (error) {
if (error instanceof Error && 'code' in error && error.code !== 'ENOENT') {
Logger.error('Error clearing OAuth storage:', error)
throw new Error(`Failed to clear OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
}
}
}
}
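
A small sketch of using JsonFileStorage directly; the hash and directory below are placeholders, while the real provider passes config.serverUrlHash and config.configDir:

import os from 'node:os'
import path from 'node:path'
import { JsonFileStorage } from './storage' // module path assumed

async function exampleStorage() {
  const configDir = path.join(os.homedir(), '.cherrystudio', 'config', 'mcp', 'oauth')
  const storage = new JsonFileStorage('example-server-hash', configDir)
  await storage.saveCodeVerifier('pkce-code-verifier') // written atomically to <hash>_oauth.json
  const verifier = await storage.getCodeVerifier() // throws if nothing was saved
  console.log(verifier)
  await storage.clear() // removes the file and resets the in-memory cache
}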


@ -0,0 +1,61 @@
import {
OAuthClientInformation,
OAuthClientInformationFull,
OAuthTokens
} from '@modelcontextprotocol/sdk/shared/auth.js'
import EventEmitter from 'events'
import { z } from 'zod'
export interface OAuthStorageData {
clientInfo?: OAuthClientInformation
tokens?: OAuthTokens
codeVerifier?: string
lastUpdated: number
}
export const OAuthStorageSchema = z.object({
clientInfo: z.any().optional(),
tokens: z.any().optional(),
codeVerifier: z.string().optional(),
lastUpdated: z.number()
})
export interface IOAuthStorage {
getClientInformation(): Promise<OAuthClientInformation | undefined>
saveClientInformation(info: OAuthClientInformationFull): Promise<void>
getTokens(): Promise<OAuthTokens | undefined>
saveTokens(tokens: OAuthTokens): Promise<void>
getCodeVerifier(): Promise<string>
saveCodeVerifier(codeVerifier: string): Promise<void>
clear(): Promise<void>
}
/**
* OAuth callback server setup options
*/
export interface OAuthCallbackServerOptions {
/** Port for the callback server */
port: number
/** Path for the callback endpoint */
path: string
/** Event emitter to signal when auth code is received */
events: EventEmitter
}
/**
* Options for creating an OAuth client provider
*/
export interface OAuthProviderOptions {
/** Server URL to connect to */
serverUrlHash: string
/** Port for the OAuth callback server */
callbackPort?: number
/** Path for the OAuth callback endpoint */
callbackPath?: string
/** Directory to store OAuth credentials */
configDir?: string
/** Client name to use for OAuth registration */
clientName?: string
/** Client URI to use for OAuth registration */
clientUri?: string
}


@ -1,4 +1,5 @@
import * as fs from 'node:fs'
import os from 'node:os'
import path from 'node:path'
import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant'
@ -74,3 +75,11 @@ export function getTempDir() {
export function getFilesDir() {
return path.join(app.getPath('userData'), 'Data', 'Files')
}
export function getConfigDir() {
return path.join(os.homedir(), '.cherrystudio', 'config')
}
export function getAppConfigDir(name: string) {
return path.join(getConfigDir(), name)
}


@ -46,3 +46,9 @@ export function dumpPersistState() {
export const runAsyncFunction = async (fn: () => void) => {
await fn()
}
export function makeSureDirExists(dir: string) {
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true })
}
}


@ -42,7 +42,11 @@ export async function getBinaryName(name: string): Promise<string> {
return name
}
export async function getBinaryPath(name: string): Promise<string> {
export async function getBinaryPath(name?: string): Promise<string> {
if (!name) {
return path.join(os.homedir(), '.cherrystudio', 'bin')
}
const binaryName = await getBinaryName(name)
const binariesDir = path.join(os.homedir(), '.cherrystudio', 'bin')
const binariesDirExists = await fs.existsSync(binariesDir)


@ -1,7 +1,7 @@
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { ElectronAPI } from '@electron-toolkit/preload'
import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server'
import type { MCPServer, MCPTool } from '@renderer/types'
import type { File } from '@google/genai'
import type { GetMCPPromptResponse, MCPPrompt, MCPResource, MCPServer, MCPTool } from '@renderer/types'
import { AppInfo, FileType, KnowledgeBaseParams, KnowledgeItem, LanguageVarious, WebDavConfig } from '@renderer/types'
import type { LoaderReturn } from '@shared/config/types'
import type { OpenDialogOptions } from 'electron'
@ -29,11 +29,16 @@ declare global {
setTrayOnClose: (isActive: boolean) => void
restartTray: () => void
setTheme: (theme: 'light' | 'dark') => void
minApp: (options: { url: string; windowOptions?: Electron.BrowserWindowConstructorOptions }) => void
setCustomCss: (css: string) => void
setAutoUpdate: (isActive: boolean) => void
reload: () => void
clearCache: () => Promise<{ success: boolean; error?: string }>
sentry: {
init: () => Promise<void>
}
system: {
getDeviceType: () => Promise<'mac' | 'windows' | 'linux'>
getHostname: () => Promise<string>
}
zip: {
compress: (text: string) => Promise<Buffer>
@ -47,6 +52,7 @@ declare global {
listWebdavFiles: (webdavConfig: WebDavConfig) => Promise<BackupFile[]>
checkConnection: (webdavConfig: WebDavConfig) => Promise<boolean>
createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) => Promise<void>
deleteWebdavFile: (fileName: string, webdavConfig: WebDavConfig) => Promise<boolean>
}
file: {
select: (options?: OpenDialogOptions) => Promise<FileType[] | null>
@ -119,11 +125,11 @@ declare global {
resetMinimumSize: () => Promise<void>
}
gemini: {
uploadFile: (file: FileType, apiKey: string) => Promise<UploadFileResponse>
retrieveFile: (file: FileType, apiKey: string) => Promise<FileMetadataResponse | undefined>
uploadFile: (file: FileType, apiKey: string) => Promise<File>
retrieveFile: (file: FileType, apiKey: string) => Promise<File | undefined>
base64File: (file: FileType) => Promise<{ data: string; mimeType: string }>
listFiles: (apiKey: string) => Promise<ListFilesResponse>
deleteFile: (apiKey: string, fileId: string) => Promise<void>
listFiles: (apiKey: string) => Promise<File[]>
deleteFile: (fileId: string, apiKey: string) => Promise<void>
}
selectionMenu: {
action: (action: string) => Promise<void>
@ -137,6 +143,7 @@ declare global {
hide: () => Promise<void>
close: () => Promise<void>
toggle: () => Promise<void>
setPin: (isPinned: boolean) => Promise<void>
}
aes: {
encrypt: (text: string, secretKey: string, iv: string) => Promise<{ iv: string; encryptedData: string }>
@ -150,7 +157,27 @@ declare global {
restartServer: (server: MCPServer) => Promise<void>
stopServer: (server: MCPServer) => Promise<void>
listTools: (server: MCPServer) => Promise<MCPTool[]>
callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) => Promise<any>
callTool: ({
server,
name,
args
}: {
server: MCPServer
name: string
args: any
}) => Promise<MCPCallToolResponse>
listPrompts: (server: MCPServer) => Promise<MCPPrompt[]>
getPrompt: ({
server,
name,
args
}: {
server: MCPServer
name: string
args?: Record<string, any>
}) => Promise<GetMCPPromptResponse>
listResources: (server: MCPServer) => Promise<MCPResource[]>
getResource: ({ server, uri }: { server: MCPServer; uri: string }) => Promise<GetResourceResponse>
getInstallInfo: () => Promise<{ dir: string; uvPath: string; bunPath: string }>
}
copilot: {
@ -175,6 +202,11 @@ declare global {
decryptToken: (token: string) => Promise<{ username: string; access_token: string }>
getDirectoryContents: (token: string, path: string) => Promise<any>
}
searchService: {
openSearchWindow: (uid: string) => Promise<string>
closeSearchWindow: (uid: string) => Promise<string>
openUrlInSearchWindow: (uid: string, url: string) => Promise<string>
}
}
}
}
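
A renderer-side sketch of calling the window.api surface declared above; where the MCPServer value comes from is outside this file, so it is passed in as a parameter here:

import type { MCPServer } from '@renderer/types'

// List the tools exposed by one configured MCP server via the preload bridge
async function exampleListMcpTools(server: MCPServer) {
  const tools = await window.api.mcp.listTools(server)
  console.log(`server exposes ${tools.length} tools`)
  return tools
}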


@ -1,79 +1,90 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { electronAPI } from '@electron-toolkit/preload'
import { IpcChannel } from '@shared/IpcChannel'
import { FileType, KnowledgeBaseParams, KnowledgeItem, MCPServer, Shortcut, WebDavConfig } from '@types'
import { contextBridge, ipcRenderer, OpenDialogOptions, shell } from 'electron'
import { CreateDirectoryOptions } from 'webdav'
// Custom APIs for renderer
const api = {
getAppInfo: () => ipcRenderer.invoke('app:info'),
reload: () => ipcRenderer.invoke('app:reload'),
setProxy: (proxy: string) => ipcRenderer.invoke('app:proxy', proxy),
checkForUpdate: () => ipcRenderer.invoke('app:check-for-update'),
showUpdateDialog: () => ipcRenderer.invoke('app:show-update-dialog'),
setLanguage: (lang: string) => ipcRenderer.invoke('app:set-language', lang),
setLaunchOnBoot: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-on-boot', isActive),
setLaunchToTray: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-to-tray', isActive),
setTray: (isActive: boolean) => ipcRenderer.invoke('app:set-tray', isActive),
setTrayOnClose: (isActive: boolean) => ipcRenderer.invoke('app:set-tray-on-close', isActive),
restartTray: () => ipcRenderer.invoke('app:restart-tray'),
setTheme: (theme: 'light' | 'dark') => ipcRenderer.invoke('app:set-theme', theme),
openWebsite: (url: string) => ipcRenderer.invoke('open:website', url),
minApp: (url: string) => ipcRenderer.invoke('minapp', url),
clearCache: () => ipcRenderer.invoke('app:clear-cache'),
getAppInfo: () => ipcRenderer.invoke(IpcChannel.App_Info),
reload: () => ipcRenderer.invoke(IpcChannel.App_Reload),
setProxy: (proxy: string) => ipcRenderer.invoke(IpcChannel.App_Proxy, proxy),
checkForUpdate: () => ipcRenderer.invoke(IpcChannel.App_CheckForUpdate),
showUpdateDialog: () => ipcRenderer.invoke(IpcChannel.App_ShowUpdateDialog),
setLanguage: (lang: string) => ipcRenderer.invoke(IpcChannel.App_SetLanguage, lang),
setLaunchOnBoot: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetLaunchOnBoot, isActive),
setLaunchToTray: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetLaunchToTray, isActive),
setTray: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetTray, isActive),
setTrayOnClose: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetTrayOnClose, isActive),
restartTray: () => ipcRenderer.invoke(IpcChannel.App_RestartTray),
setTheme: (theme: 'light' | 'dark') => ipcRenderer.invoke(IpcChannel.App_SetTheme, theme),
setCustomCss: (css: string) => ipcRenderer.invoke(IpcChannel.App_SetCustomCss, css),
setAutoUpdate: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetAutoUpdate, isActive),
openWebsite: (url: string) => ipcRenderer.invoke(IpcChannel.Open_Website, url),
clearCache: () => ipcRenderer.invoke(IpcChannel.App_ClearCache),
sentry: {
init: () => ipcRenderer.invoke(IpcChannel.Sentry_Init)
},
system: {
getDeviceType: () => ipcRenderer.invoke('system:getDeviceType')
getDeviceType: () => ipcRenderer.invoke(IpcChannel.System_GetDeviceType),
getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname)
},
zip: {
compress: (text: string) => ipcRenderer.invoke('zip:compress', text),
decompress: (text: Buffer) => ipcRenderer.invoke('zip:decompress', text)
compress: (text: string) => ipcRenderer.invoke(IpcChannel.Zip_Compress, text),
decompress: (text: Buffer) => ipcRenderer.invoke(IpcChannel.Zip_Decompress, text)
},
backup: {
backup: (fileName: string, data: string, destinationPath?: string) =>
ipcRenderer.invoke('backup:backup', fileName, data, destinationPath),
restore: (backupPath: string) => ipcRenderer.invoke('backup:restore', backupPath),
ipcRenderer.invoke(IpcChannel.Backup_Backup, fileName, data, destinationPath),
restore: (backupPath: string) => ipcRenderer.invoke(IpcChannel.Backup_Restore, backupPath),
backupToWebdav: (data: string, webdavConfig: WebDavConfig) =>
ipcRenderer.invoke('backup:backupToWebdav', data, webdavConfig),
restoreFromWebdav: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:restoreFromWebdav', webdavConfig),
listWebdavFiles: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:listWebdavFiles', webdavConfig),
checkConnection: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:checkConnection', webdavConfig),
ipcRenderer.invoke(IpcChannel.Backup_BackupToWebdav, data, webdavConfig),
restoreFromWebdav: (webdavConfig: WebDavConfig) =>
ipcRenderer.invoke(IpcChannel.Backup_RestoreFromWebdav, webdavConfig),
listWebdavFiles: (webdavConfig: WebDavConfig) =>
ipcRenderer.invoke(IpcChannel.Backup_ListWebdavFiles, webdavConfig),
checkConnection: (webdavConfig: WebDavConfig) =>
ipcRenderer.invoke(IpcChannel.Backup_CheckConnection, webdavConfig),
createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) =>
ipcRenderer.invoke('backup:createDirectory', webdavConfig, path, options)
ipcRenderer.invoke(IpcChannel.Backup_CreateDirectory, webdavConfig, path, options),
deleteWebdavFile: (fileName: string, webdavConfig: WebDavConfig) =>
ipcRenderer.invoke(IpcChannel.Backup_DeleteWebdavFile, fileName, webdavConfig)
},
file: {
select: (options?: OpenDialogOptions) => ipcRenderer.invoke('file:select', options),
upload: (filePath: string) => ipcRenderer.invoke('file:upload', filePath),
delete: (fileId: string) => ipcRenderer.invoke('file:delete', fileId),
read: (fileId: string) => ipcRenderer.invoke('file:read', fileId),
clear: () => ipcRenderer.invoke('file:clear'),
get: (filePath: string) => ipcRenderer.invoke('file:get', filePath),
create: (fileName: string) => ipcRenderer.invoke('file:create', fileName),
write: (filePath: string, data: Uint8Array | string) => ipcRenderer.invoke('file:write', filePath, data),
open: (options?: { decompress: boolean }) => ipcRenderer.invoke('file:open', options),
openPath: (path: string) => ipcRenderer.invoke('file:openPath', path),
select: (options?: OpenDialogOptions) => ipcRenderer.invoke(IpcChannel.File_Select, options),
upload: (filePath: string) => ipcRenderer.invoke(IpcChannel.File_Upload, filePath),
delete: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Delete, fileId),
read: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Read, fileId),
clear: () => ipcRenderer.invoke(IpcChannel.File_Clear),
get: (filePath: string) => ipcRenderer.invoke(IpcChannel.File_Get, filePath),
create: (fileName: string) => ipcRenderer.invoke(IpcChannel.File_Create, fileName),
write: (filePath: string, data: Uint8Array | string) => ipcRenderer.invoke(IpcChannel.File_Write, filePath, data),
open: (options?: { decompress: boolean }) => ipcRenderer.invoke(IpcChannel.File_Open, options),
openPath: (path: string) => ipcRenderer.invoke(IpcChannel.File_OpenPath, path),
save: (path: string, content: string, options?: { compress: boolean }) =>
ipcRenderer.invoke('file:save', path, content, options),
selectFolder: () => ipcRenderer.invoke('file:selectFolder'),
saveImage: (name: string, data: string) => ipcRenderer.invoke('file:saveImage', name, data),
base64Image: (fileId: string) => ipcRenderer.invoke('file:base64Image', fileId),
download: (url: string) => ipcRenderer.invoke('file:download', url),
copy: (fileId: string, destPath: string) => ipcRenderer.invoke('file:copy', fileId, destPath),
binaryFile: (fileId: string) => ipcRenderer.invoke('file:binaryFile', fileId)
ipcRenderer.invoke(IpcChannel.File_Save, path, content, options),
selectFolder: () => ipcRenderer.invoke(IpcChannel.File_SelectFolder),
saveImage: (name: string, data: string) => ipcRenderer.invoke(IpcChannel.File_SaveImage, name, data),
base64Image: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Base64Image, fileId),
download: (url: string) => ipcRenderer.invoke(IpcChannel.File_Download, url),
copy: (fileId: string, destPath: string) => ipcRenderer.invoke(IpcChannel.File_Copy, fileId, destPath),
binaryFile: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_BinaryFile, fileId)
},
fs: {
read: (path: string) => ipcRenderer.invoke('fs:read', path)
read: (path: string) => ipcRenderer.invoke(IpcChannel.Fs_Read, path)
},
export: {
toWord: (markdown: string, fileName: string) => ipcRenderer.invoke('export:word', markdown, fileName)
toWord: (markdown: string, fileName: string) => ipcRenderer.invoke(IpcChannel.Export_Word, markdown, fileName)
},
openPath: (path: string) => ipcRenderer.invoke('open:path', path),
openPath: (path: string) => ipcRenderer.invoke(IpcChannel.Open_Path, path),
shortcuts: {
update: (shortcuts: Shortcut[]) => ipcRenderer.invoke('shortcuts:update', shortcuts)
update: (shortcuts: Shortcut[]) => ipcRenderer.invoke(IpcChannel.Shortcuts_Update, shortcuts)
},
knowledgeBase: {
create: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:create', base),
reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:reset', base),
delete: (id: string) => ipcRenderer.invoke('knowledge-base:delete', id),
create: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Create, base),
reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Reset, base),
delete: (id: string) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Delete, id),
add: ({
base,
item,
@ -82,70 +93,80 @@ const api = {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload?: boolean
}) => ipcRenderer.invoke('knowledge-base:add', { base, item, forceReload }),
}) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Add, { base, item, forceReload }),
remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) =>
ipcRenderer.invoke('knowledge-base:remove', { uniqueId, uniqueIds, base }),
ipcRenderer.invoke(IpcChannel.KnowledgeBase_Remove, { uniqueId, uniqueIds, base }),
search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) =>
ipcRenderer.invoke('knowledge-base:search', { search, base }),
ipcRenderer.invoke(IpcChannel.KnowledgeBase_Search, { search, base }),
rerank: ({ search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }) =>
ipcRenderer.invoke('knowledge-base:rerank', { search, base, results })
ipcRenderer.invoke(IpcChannel.KnowledgeBase_Rerank, { search, base, results })
},
window: {
setMinimumSize: (width: number, height: number) => ipcRenderer.invoke('window:set-minimum-size', width, height),
resetMinimumSize: () => ipcRenderer.invoke('window:reset-minimum-size')
setMinimumSize: (width: number, height: number) =>
ipcRenderer.invoke(IpcChannel.Windows_SetMinimumSize, width, height),
resetMinimumSize: () => ipcRenderer.invoke(IpcChannel.Windows_ResetMinimumSize)
},
gemini: {
uploadFile: (file: FileType, apiKey: string) => ipcRenderer.invoke('gemini:upload-file', file, apiKey),
base64File: (file: FileType) => ipcRenderer.invoke('gemini:base64-file', file),
retrieveFile: (file: FileType, apiKey: string) => ipcRenderer.invoke('gemini:retrieve-file', file, apiKey),
listFiles: (apiKey: string) => ipcRenderer.invoke('gemini:list-files', apiKey),
deleteFile: (apiKey: string, fileId: string) => ipcRenderer.invoke('gemini:delete-file', apiKey, fileId)
uploadFile: (file: FileType, apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_UploadFile, file, apiKey),
base64File: (file: FileType) => ipcRenderer.invoke(IpcChannel.Gemini_Base64File, file),
retrieveFile: (file: FileType, apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_RetrieveFile, file, apiKey),
listFiles: (apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_ListFiles, apiKey),
deleteFile: (apiKey: string, fileId: string) => ipcRenderer.invoke(IpcChannel.Gemini_DeleteFile, apiKey, fileId)
},
selectionMenu: {
action: (action: string) => ipcRenderer.invoke('selection-menu:action', action)
action: (action: string) => ipcRenderer.invoke(IpcChannel.SelectionMenu_Action, action)
},
config: {
set: (key: string, value: any) => ipcRenderer.invoke('config:set', key, value),
get: (key: string) => ipcRenderer.invoke('config:get', key)
set: (key: string, value: any) => ipcRenderer.invoke(IpcChannel.Config_Set, key, value),
get: (key: string) => ipcRenderer.invoke(IpcChannel.Config_Get, key)
},
miniWindow: {
show: () => ipcRenderer.invoke('miniwindow:show'),
hide: () => ipcRenderer.invoke('miniwindow:hide'),
close: () => ipcRenderer.invoke('miniwindow:close'),
toggle: () => ipcRenderer.invoke('miniwindow:toggle')
show: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Show),
hide: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Hide),
close: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Close),
toggle: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Toggle),
setPin: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.MiniWindow_SetPin, isPinned)
},
aes: {
encrypt: (text: string, secretKey: string, iv: string) => ipcRenderer.invoke('aes:encrypt', text, secretKey, iv),
encrypt: (text: string, secretKey: string, iv: string) =>
ipcRenderer.invoke(IpcChannel.Aes_Encrypt, text, secretKey, iv),
decrypt: (encryptedData: string, iv: string, secretKey: string) =>
ipcRenderer.invoke('aes:decrypt', encryptedData, iv, secretKey)
ipcRenderer.invoke(IpcChannel.Aes_Decrypt, encryptedData, iv, secretKey)
},
mcp: {
removeServer: (server: MCPServer) => ipcRenderer.invoke('mcp:remove-server', server),
restartServer: (server: MCPServer) => ipcRenderer.invoke('mcp:restart-server', server),
stopServer: (server: MCPServer) => ipcRenderer.invoke('mcp:stop-server', server),
listTools: (server: MCPServer) => ipcRenderer.invoke('mcp:list-tools', server),
callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) =>
ipcRenderer.invoke('mcp:call-tool', { server, name, args }),
getInstallInfo: () => ipcRenderer.invoke('mcp:get-install-info')
removeServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RemoveServer, server),
restartServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RestartServer, server),
stopServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_StopServer, server),
listTools: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListTools, server),
callTool: ({ server, name, args }: { server: MCPServer; name: string; args?: Record<string, any> }) =>
ipcRenderer.invoke(IpcChannel.Mcp_CallTool, { server, name, args }),
listPrompts: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListPrompts, server),
getPrompt: ({ server, name, args }: { server: MCPServer; name: string; args?: Record<string, any> }) =>
ipcRenderer.invoke(IpcChannel.Mcp_GetPrompt, { server, name, args }),
listResources: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListResources, server),
getResource: ({ server, uri }: { server: MCPServer; uri: string }) =>
ipcRenderer.invoke(IpcChannel.Mcp_GetResource, { server, uri }),
getInstallInfo: () => ipcRenderer.invoke(IpcChannel.Mcp_GetInstallInfo)
},
shell: {
openExternal: shell.openExternal
},
copilot: {
getAuthMessage: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-auth-message', headers),
getAuthMessage: (headers?: Record<string, string>) =>
ipcRenderer.invoke(IpcChannel.Copilot_GetAuthMessage, headers),
getCopilotToken: (device_code: string, headers?: Record<string, string>) =>
ipcRenderer.invoke('copilot:get-copilot-token', device_code, headers),
saveCopilotToken: (access_token: string) => ipcRenderer.invoke('copilot:save-copilot-token', access_token),
getToken: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-token', headers),
logout: () => ipcRenderer.invoke('copilot:logout'),
getUser: (token: string) => ipcRenderer.invoke('copilot:get-user', token)
ipcRenderer.invoke(IpcChannel.Copilot_GetCopilotToken, device_code, headers),
saveCopilotToken: (access_token: string) => ipcRenderer.invoke(IpcChannel.Copilot_SaveCopilotToken, access_token),
getToken: (headers?: Record<string, string>) => ipcRenderer.invoke(IpcChannel.Copilot_GetToken, headers),
logout: () => ipcRenderer.invoke(IpcChannel.Copilot_Logout),
getUser: (token: string) => ipcRenderer.invoke(IpcChannel.Copilot_GetUser, token)
},
// Binary related APIs
isBinaryExist: (name: string) => ipcRenderer.invoke('app:is-binary-exist', name),
getBinaryPath: (name: string) => ipcRenderer.invoke('app:get-binary-path', name),
installUVBinary: () => ipcRenderer.invoke('app:install-uv-binary'),
installBunBinary: () => ipcRenderer.invoke('app:install-bun-binary'),
isBinaryExist: (name: string) => ipcRenderer.invoke(IpcChannel.App_IsBinaryExist, name),
getBinaryPath: (name: string) => ipcRenderer.invoke(IpcChannel.App_GetBinaryPath, name),
installUVBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallUvBinary),
installBunBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallBunBinary),
protocol: {
onReceiveData: (callback: (data: { url: string; params: any }) => void) => {
const listener = (_event: Electron.IpcRendererEvent, data: { url: string; params: any }) => {
@ -158,10 +179,15 @@ const api = {
}
},
nutstore: {
getSSOUrl: () => ipcRenderer.invoke('nutstore:get-sso-url'),
decryptToken: (token: string) => ipcRenderer.invoke('nutstore:decrypt-token', token),
getSSOUrl: () => ipcRenderer.invoke(IpcChannel.Nutstore_GetSsoUrl),
decryptToken: (token: string) => ipcRenderer.invoke(IpcChannel.Nutstore_DecryptToken, token),
getDirectoryContents: (token: string, path: string) =>
ipcRenderer.invoke('nutstore:get-directory-contents', token, path)
ipcRenderer.invoke(IpcChannel.Nutstore_GetDirectoryContents, token, path)
},
searchService: {
openSearchWindow: (uid: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_Open, uid),
closeSearchWindow: (uid: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_Close, uid),
openUrlInSearchWindow: (uid: string, url: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_OpenUrl, uid, url)
}
}
@ -173,9 +199,9 @@ if (process.contextIsolated) {
contextBridge.exposeInMainWorld('electron', electronAPI)
contextBridge.exposeInMainWorld('api', api)
contextBridge.exposeInMainWorld('obsidian', {
getVaults: () => ipcRenderer.invoke('obsidian:get-vaults'),
getFolders: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName),
getFiles: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName)
getVaults: () => ipcRenderer.invoke(IpcChannel.Obsidian_GetVaults),
getFolders: (vaultName: string) => ipcRenderer.invoke(IpcChannel.Obsidian_GetFiles, vaultName),
getFiles: (vaultName: string) => ipcRenderer.invoke(IpcChannel.Obsidian_GetFiles, vaultName)
})
} catch (error) {
console.error(error)


@ -1,43 +1,42 @@
<!doctype html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="initial-scale=1, width=device-width" />
<meta
http-equiv="Content-Security-Policy"
content="default-src 'self'; connect-src blob: *; script-src 'self' 'unsafe-eval' *; worker-src 'self' blob:; style-src 'self' 'unsafe-inline' *; font-src 'self' data: *; img-src 'self' data: file: * blob:; frame-src * file:" />
<title>Cherry Studio</title>
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="initial-scale=1, width=device-width" />
<meta http-equiv="Content-Security-Policy"
content="default-src 'self'; connect-src blob: *; script-src 'self' 'unsafe-eval' *; worker-src 'self' blob:; style-src 'self' 'unsafe-inline' *; font-src 'self' data: *; img-src 'self' data: file: * blob:; frame-src * file:" />
<title>Cherry Studio</title>
<style>
html,
body {
margin: 0;
}
<style>
html,
body {
margin: 0;
}
#spinner {
position: fixed;
width: 100vw;
height: 100vh;
flex-direction: row;
justify-content: center;
align-items: center;
display: none;
}
#spinner {
position: fixed;
width: 100vw;
height: 100vh;
flex-direction: row;
justify-content: center;
align-items: center;
display: none;
}
#spinner img {
width: 100px;
border-radius: 50px;
}
</style>
</head>
#spinner img {
width: 100px;
border-radius: 50px;
}
</style>
</head>
<body>
<div id="root"></div>
<div id="spinner">
<img src="/src/assets/images/logo.png" />
</div>
<script type="module" src="/src/init.ts"></script>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>
<body>
<div id="root"></div>
<div id="spinner">
<img src="/src/assets/images/logo.png" />
</div>
<script type="module" src="/src/init.ts"></script>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1744456106953" class="icon" viewBox="0 0 2633 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1486" xmlns:xlink="http://www.w3.org/1999/xlink" width="514.2578125" height="200"><path d="M0 0v877.843607h731.440328v146.156393H1316.724263v-146.156393h1316.724262V0z m731.440328 731.196014h-146.156393V292.312786h-146.485574v438.883228H146.485574V146.238688h584.954754z m438.880656 0v146.567869h-292.405369V146.238688H1463.209837v585.037049H1170.320984z m1316.888853 0H2341.30033V292.312786h-146.56787v438.883228h-146.485574V292.312786h-145.909508v438.883228H1609.283935V146.238688h878.008197zM1170.238688 292.477377H1316.724263v292.644539h-146.485575z" fill="#CB3837" p-id="1487"></path></svg>


@ -1,4 +1,4 @@
@keyframes pulse {
@keyframes animation-pulse {
0% {
box-shadow: 0 0 0 0 rgba(var(--pulse-color), 0.5);
}
@ -14,5 +14,5 @@
.animation-pulse {
--pulse-color: 59, 130, 246;
--pulse-size: 8px;
animation: pulse 1.5s infinite;
animation: animation-pulse 1.5s infinite;
}


@ -192,3 +192,18 @@
}
}
}
.ant-dropdown-menu .ant-dropdown-menu-sub {
max-height: 350px;
width: max-content;
overflow-y: auto;
overflow-x: hidden;
}
.ant-collapse {
border: 1px solid var(--color-border);
}
.ant-collapse-content {
border-top: 1px solid var(--color-border) !important;
}


@ -36,11 +36,11 @@
--color-text: var(--color-text-1);
--color-icon: #ffffff99;
--color-icon-white: #ffffff;
--color-border: #ffffff15;
--color-border: #ffffff19;
--color-border-soft: #ffffff10;
--color-border-mute: #ffffff05;
--color-error: #f44336;
--color-link: #1677ff;
--color-link: #338cff;
--color-code-background: #323232;
--color-hover: rgba(40, 40, 40, 1);
--color-active: rgba(55, 55, 55, 1);
@ -80,7 +80,7 @@ body {
body[theme-mode='light'] {
--color-white: #ffffff;
--color-white-soft: #f2f2f2;
--color-white-soft: rgba(0, 0, 0, 0.04);
--color-white-mute: #eee;
--color-black: #1b1b1f;
@ -108,7 +108,7 @@ body[theme-mode='light'] {
--color-text: var(--color-text-1);
--color-icon: #00000099;
--color-icon-white: #000000;
--color-border: #00000015;
--color-border: #00000019;
--color-border-soft: #00000010;
--color-border-mute: #00000005;
--color-error: #f44336;
@ -260,6 +260,7 @@ body,
.markdown,
.anticon,
.iconfont,
.lucide,
.message-tokens {
color: var(--chat-text-user) !important;
}
@ -281,3 +282,7 @@ body,
color: var(--color-text);
}
}
.lucide {
color: var(--color-icon);
}


@ -21,8 +21,9 @@
h6 {
margin: 1em 0 1em 0;
font-weight: 800;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans',
'Helvetica Neue', sans-serif;
font-family:
-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue',
sans-serif;
}
h1 {
@ -170,8 +171,9 @@
th {
background-color: var(--color-background-mute);
font-weight: bold;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans',
'Helvetica Neue', sans-serif;
font-family:
-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue',
sans-serif;
}
img {
@ -295,10 +297,10 @@ emoji-picker {
--border-size: 0;
}
.katex-display{
.katex-display {
overflow-x: auto;
overflow-y: hidden;
}
mjx-container{
mjx-container {
overflow-x: auto;
}
}


@ -0,0 +1,75 @@
import { Collapse } from 'antd'
import { merge } from 'lodash'
import { FC, memo } from 'react'
interface CustomCollapseProps {
label: React.ReactNode
extra: React.ReactNode
children: React.ReactNode
destroyInactivePanel?: boolean
defaultActiveKey?: string[]
activeKey?: string[]
collapsible?: 'header' | 'icon' | 'disabled'
style?: React.CSSProperties
styles?: {
header?: React.CSSProperties
body?: React.CSSProperties
}
}
const CustomCollapse: FC<CustomCollapseProps> = ({
label,
extra,
children,
destroyInactivePanel = false,
defaultActiveKey = ['1'],
activeKey,
collapsible = undefined,
style,
styles
}) => {
const defaultCollapseStyle = {
width: '100%',
background: 'transparent',
border: '0.5px solid var(--color-border)'
}
const defaultCollapseItemStyles = {
header: {
padding: '8px 16px',
alignItems: 'center',
justifyContent: 'space-between',
background: 'var(--color-background-soft)',
borderTopLeftRadius: '8px',
borderTopRightRadius: '8px'
},
body: {
borderTop: 'none'
}
}
const collapseStyle = merge({}, defaultCollapseStyle, style)
const collapseItemStyles = merge({}, defaultCollapseItemStyles, styles)
return (
<Collapse
bordered={false}
style={collapseStyle}
defaultActiveKey={defaultActiveKey}
activeKey={activeKey}
destroyInactivePanel={destroyInactivePanel}
collapsible={collapsible}
items={[
{
styles: collapseItemStyles,
key: '1',
label,
extra,
children
}
]}
/>
)
}
export default memo(CustomCollapse)
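
An illustrative usage of the CustomCollapse component above; the labels, content, and style overrides are placeholders:

import CustomCollapse from '@renderer/components/CustomCollapse' // import path assumed

const ExampleCollapse = () => (
  <CustomCollapse
    label={<span>Advanced settings</span>}
    extra={<span>3 items</span>}
    defaultActiveKey={['1']}
    styles={{ header: { padding: '4px 16px' } }}>
    <p>Panel content goes here.</p>
  </CustomCollapse>
)

export default ExampleCollapse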


@ -0,0 +1,67 @@
import { CloseOutlined } from '@ant-design/icons'
import { Tooltip } from 'antd'
import { FC } from 'react'
import styled from 'styled-components'
interface CustomTagProps {
icon?: React.ReactNode
children?: React.ReactNode | string
color: string
size?: number
tooltip?: string
closable?: boolean
onClose?: () => void
}
const CustomTag: FC<CustomTagProps> = ({ children, icon, color, size = 12, tooltip, closable = false, onClose }) => {
return (
<Tooltip title={tooltip} placement="top">
<Tag $color={color} $size={size} $closable={closable}>
{icon && icon} {children}
{closable && <CloseIcon $size={size} $color={color} onClick={onClose} />}
</Tag>
</Tooltip>
)
}
export default CustomTag
const Tag = styled.div<{ $color: string; $size: number; $closable: boolean }>`
display: inline-flex;
align-items: center;
gap: 4px;
padding: ${({ $size }) => $size / 3}px ${({ $size }) => $size * 0.8}px;
padding-right: ${({ $closable, $size }) => ($closable ? $size * 1.8 : $size * 0.8)}px;
border-radius: 99px;
color: ${({ $color }) => $color};
background-color: ${({ $color }) => $color + '20'};
font-size: ${({ $size }) => $size}px;
line-height: 1;
white-space: nowrap;
position: relative;
.iconfont {
font-size: ${({ $size }) => $size}px;
color: ${({ $color }) => $color};
}
`
const CloseIcon = styled(CloseOutlined)<{ $size: number; $color: string }>`
cursor: pointer;
font-size: ${({ $size }) => $size * 0.8}px;
color: ${({ $color }) => $color};
display: flex;
align-items: center;
justify-content: center;
position: absolute;
right: ${({ $size }) => $size * 0.2}px;
top: ${({ $size }) => $size * 0.2}px;
bottom: ${({ $size }) => $size * 0.2}px;
border-radius: 99px;
transition: all 0.2s ease;
aspect-ratio: 1;
line-height: 1;
&:hover {
background-color: #da8a8a;
color: #ffffff;
}
`
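
An illustrative usage of the CustomTag component above; the color, size, and close handler are placeholders:

import CustomTag from '@renderer/components/CustomTag' // import path assumed

const ExampleTag = () => (
  <CustomTag color="#00b96b" size={12} tooltip="Provider tag" closable onClose={() => console.log('tag closed')}>
    OpenAI
  </CustomTag>
)

export default ExampleTag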


@ -0,0 +1,36 @@
import React, { CSSProperties } from 'react'
import styled from 'styled-components'
interface DividerWithTextProps {
text: string
style?: CSSProperties
}
const DividerWithText: React.FC<DividerWithTextProps> = ({ text, style }) => {
return (
<DividerContainer style={style}>
<DividerText>{text}</DividerText>
<DividerLine />
</DividerContainer>
)
}
const DividerContainer = styled.div`
display: flex;
align-items: center;
margin: 0px 0;
`
const DividerText = styled.span`
font-size: 12px;
color: var(--color-text-2);
margin-right: 8px;
`
const DividerLine = styled.div`
flex: 1;
height: 1px;
background-color: var(--color-border);
`
export default DividerWithText


@ -0,0 +1,49 @@
import { getLeadingEmoji } from '@renderer/utils'
import { FC } from 'react'
import styled from 'styled-components'
interface EmojiIconProps {
emoji: string
className?: string
}
const EmojiIcon: FC<EmojiIconProps> = ({ emoji, className }) => {
const _emoji = getLeadingEmoji(emoji || '⭐️') || '⭐️'
return (
<Container className={className}>
<EmojiBackground>{_emoji}</EmojiBackground>
{_emoji}
</Container>
)
}
const Container = styled.div`
width: 26px;
height: 26px;
border-radius: 13px;
display: flex;
align-items: center;
justify-content: center;
flex-shrink: 0;
font-size: 15px;
position: relative;
overflow: hidden;
margin-right: 3px;
`
const EmojiBackground = styled.div`
width: 100%;
height: 100%;
position: absolute;
inset: 0;
display: flex;
align-items: center;
justify-content: center;
font-size: 200%;
transform: scale(1.5);
filter: blur(5px);
opacity: 0.4;
`
export default EmojiIcon

Some files were not shown because too many files have changed in this diff.