Mirror of https://github.com/langgenius/dify.git, synced 2024-11-16 11:42:29 +08:00

Compare commits: 167 commits, 1812af6cfe … 59b76900d7
54 .github/pull_request_template.md (vendored)
@@ -1,34 +1,32 @@
# Checklist:
# Summary

Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.

> [!Tip]
> Close issue syntax: `Fixes #<issue number>` or `Resolves #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.

# Screenshots

<table>
  <tr>
    <td>Before: </td>
    <td>After: </td>
  </tr>
  <tr>
    <td>...</td>
    <td>...</td>
  </tr>
</table>

# Checklist

> [!IMPORTANT]
> Please review the checklist below before submitting your pull request.

- [ ] Please open an issue before creating a PR or link to an existing issue
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods

# Description

Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue. Close issue syntax: `Fixes #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.

Fixes

## Type of Change

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
- [ ] Improvement, including but not limited to code refactoring, performance optimization, and UI/UX improvement
- [ ] Dependency upgrade

# Testing Instructions

Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration

- [ ] Test A
- [ ] Test B

- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [x] I've updated the documentation accordingly.
- [x] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
|
|
@ -19,6 +19,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat en Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="seguir en X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat sur Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="suivre sur X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="Discordでチャット"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="X(Twitter)でフォロー"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
|
@ -19,6 +19,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
|
|
180 README_SI.md (new file)
|
@ -0,0 +1,180 @@
|
|||
![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
|
||||
|
||||
<p align="center">
|
||||
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Samostojno gostovanje</a> ·
|
||||
<a href="https://docs.dify.ai">Dokumentacija</a> ·
|
||||
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Povpraševanje za podjetja</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_SI.md"><img alt="README Slovenščina" src="https://img.shields.io/badge/Sloven%C5%A1%C4%8Dina-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
|
||||
Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje.
|
||||
|
||||
## Hitri začetek
|
||||
> Preden namestite Dify, se prepričajte, da vaša naprava izpolnjuje naslednje minimalne sistemske zahteve:
|
||||
>
|
||||
>- CPU >= 2 Core
|
||||
>- RAM >= 4 GiB
|
||||
|
||||
</br>
|
||||
|
||||
Najlažji način za zagon strežnika Dify je prek docker compose. Preden zaženete Dify z naslednjimi ukazi, se prepričajte, da sta Docker in Docker Compose nameščena na vašem računalniku:
|
||||
|
||||
```bash
|
||||
cd dify
|
||||
cd docker
|
||||
cp .env.example .env
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
Po zagonu lahko dostopate do nadzorne plošče Dify v brskalniku na [http://localhost/install](http://localhost/install) in začnete postopek inicializacije.
|
||||
|
||||
#### Iskanje pomoči
|
||||
Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) če naletite na težave pri nastavitvi Dify. Če imate še vedno težave, se obrnite na [skupnost ali nas](#community--contact).
|
||||
|
||||
> Če želite prispevati k Difyju ali narediti dodaten razvoj, glejte naš vodnik za [uvajanje iz izvorne kode](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
|
||||
|
||||
## Ključne značilnosti
|
||||
**1. Potek dela**:
|
||||
Zgradite in preizkusite zmogljive poteke dela AI na vizualnem platnu, pri čemer izkoristite vse naslednje funkcije in več.
|
||||
|
||||
|
||||
https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
|
||||
|
||||
|
||||
|
||||
**2. Celovita podpora za modele**:
|
||||
Brezhibna integracija s stotinami lastniških/odprtokodnih LLM-jev ducatov ponudnikov sklepanja in samostojnih rešitev, ki pokrivajo GPT, Mistral, Llama3 in vse modele, združljive z API-jem OpenAI. Celoten seznam podprtih ponudnikov modelov najdete [tukaj](https://docs.dify.ai/getting-started/readme/model-providers).
|
||||
|
||||
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
|
||||
|
||||
|
||||
**3. Prompt IDE**:
|
||||
intuitivni vmesnik za ustvarjanje pozivov, primerjavo zmogljivosti modela in dodajanje dodatnih funkcij, kot je pretvorba besedila v govor, aplikaciji, ki temelji na klepetu.
|
||||
|
||||
**4. RAG Pipeline**:
|
||||
Obsežne zmogljivosti RAG, ki pokrivajo vse od vnosa dokumenta do priklica, s podporo za ekstrakcijo besedila iz datotek PDF, PPT in drugih običajnih formatov dokumentov.
|
||||
|
||||
**5. Agent capabilities**:
|
||||
definirate lahko agente, ki temeljijo na klicanju funkcij LLM ali ReAct, in dodate vnaprej izdelana orodja ali orodja po meri za agenta. Dify ponuja več kot 50 vgrajenih orodij za agente AI, kot so Google Search, DALL·E, Stable Diffusion in WolframAlpha.
|
||||
|
||||
**6. LLMOps**:
|
||||
Spremljajte in analizirajte dnevnike aplikacij in učinkovitost skozi čas. Pozive, nabore podatkov in modele lahko nenehno izboljšujete na podlagi proizvodnih podatkov in opomb.
|
||||
|
||||
**7. Backend-as-a-Service**:
|
||||
Vse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko.
|
||||
|
||||
|
||||
## Uporaba Dify
|
||||
|
||||
- **Cloud </br>**
|
||||
Gostimo storitev Dify Cloud za vsakogar, ki jo lahko preizkusite brez nastavitev. Zagotavlja vse zmožnosti različice za samostojno namestitev in vključuje 200 brezplačnih klicev GPT-4 v načrtu peskovnika.
|
||||
|
||||
- **Self-hosting Dify Community Edition</br>**
|
||||
Hitro zaženite Dify v svojem okolju s tem [začetnim vodnikom](#quick-start). Za dodatne reference in podrobnejša navodila uporabite našo [dokumentacijo](https://docs.dify.ai).
|
||||
|
||||
|
||||
- **Dify za podjetja/organizacije</br>**
|
||||
Ponujamo dodatne funkcije, osredotočene na podjetja. Zabeležite svoja vprašanja prek tega klepetalnega robota ali nam pošljite e-pošto, da se pogovorimo o potrebah podjetja. </br>
|
||||
> Za novoustanovljena podjetja in mala podjetja, ki uporabljajo AWS, si oglejte Dify Premium na AWS Marketplace in ga z enim klikom uvedite v svoj AWS VPC. To je cenovno ugodna ponudba AMI z možnostjo ustvarjanja aplikacij z logotipom in blagovno znamko po meri.
|
||||
|
||||
|
||||
## Staying ahead
|
||||
|
||||
Star Dify on GitHub and be instantly notified of new releases.
|
||||
|
||||
![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
|
||||
|
||||
|
||||
## Napredne nastavitve
|
||||
|
||||
Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi tudi datoteko docker-compose.yaml, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj.
|
||||
|
||||
Če želite konfigurirati visoko razpoložljivo nastavitev, so na voljo Helm Charts in datoteke YAML, ki jih prispeva skupnost, ki omogočajo uvedbo Difyja v Kubernetes.
|
||||
|
||||
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
|
||||
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
|
||||
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
|
||||
|
||||
#### Uporaba Terraform za uvajanje
|
||||
|
||||
namestite Dify v Cloud Platform z enim klikom z uporabo [terraform](https://www.terraform.io/)
|
||||
|
||||
##### Azure Global
|
||||
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
|
||||
|
||||
##### Google Cloud
|
||||
- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
|
||||
|
||||
## Prispevam
|
||||
|
||||
Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah.
|
||||
|
||||
|
||||
|
||||
> Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v kanalu global-users našega strežnika skupnosti Discord.
|
||||
|
||||
## Skupnost in stik
|
||||
|
||||
* [Github Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj.
|
||||
* [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
|
||||
* [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
|
||||
* [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
|
||||
|
||||
**Contributors**
|
||||
|
||||
<a href="https://github.com/langgenius/dify/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
|
||||
</a>
|
||||
|
||||
## Star history
|
||||
|
||||
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
|
||||
|
||||
|
||||
## Varnostno razkritje
|
||||
|
||||
Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj na GitHub. Namesto tega pošljite vprašanja na security@dify.ai in zagotovili vam bomo podrobnejši odgovor.
|
||||
|
||||
## Licenca
|
||||
|
||||
To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="Discord'da sohbet et"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="X(Twitter)'da takip et"></a>
|
||||
|
|
|
@ -15,6 +15,9 @@
|
|||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat trên Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="theo dõi trên X(Twitter)"></a>
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
|
@ -29,6 +30,9 @@ from models import account, dataset, model, source, task, tool, tools, web # no
|
|||
|
||||
# DO NOT REMOVE ABOVE
|
||||
|
||||
if sys.version_info[:2] == (3, 10):
|
||||
print("Warning: Python 3.10 will not be supported in the next version.")
|
||||
|
||||
|
||||
warnings.simplefilter("ignore", ResourceWarning)
|
||||
|
||||
|
@ -49,7 +53,6 @@ if dify_config.TESTING:
|
|||
@app.after_request
|
||||
def after_request(response):
|
||||
"""Add Version headers to the response."""
|
||||
response.set_cookie("remember_token", "", expires=0)
|
||||
response.headers.add("X-Version", dify_config.CURRENT_VERSION)
|
||||
response.headers.add("X-Env", dify_config.DEPLOY_ENV)
|
||||
return response
|
||||
|
|
|
@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
|
|||
|
||||
CURRENT_VERSION: str = Field(
|
||||
description="Dify version",
|
||||
default="0.11.0",
|
||||
default="0.11.1",
|
||||
)
|
||||
|
||||
COMMIT_SHA: str = Field(
|
||||
|
|
|
@ -17,6 +17,7 @@ language_timezone_mapping = {
|
|||
"hi-IN": "Asia/Kolkata",
|
||||
"tr-TR": "Europe/Istanbul",
|
||||
"fa-IR": "Asia/Tehran",
|
||||
"sl-SI": "Europe/Ljubljana",
|
||||
}
|
||||
|
||||
languages = list(language_timezone_mapping.keys())
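The hunk above registers Europe/Ljubljana as the default timezone for the new sl-SI locale. A minimal sketch of how such a locale-to-timezone table is typically consumed (a trimmed copy of the mapping is inlined here purely for illustration; pytz is assumed available, as it is imported elsewhere in this diff):

```python
from datetime import datetime

import pytz

# Trimmed copy of the mapping, for illustration only.
language_timezone_mapping = {
    "tr-TR": "Europe/Istanbul",
    "fa-IR": "Asia/Tehran",
    "sl-SI": "Europe/Ljubljana",
}


def local_now(language: str) -> datetime:
    """Current time in the default timezone for a UI language, UTC if unknown."""
    tz = pytz.timezone(language_timezone_mapping.get(language, "UTC"))
    return datetime.now(tz)


print(local_now("sl-SI").isoformat())
```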
|
||||
|
|
|
@ -361,6 +361,7 @@ class WorkflowBasedAppRunner(AppRunner):
|
|||
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
|
||||
output=event.pre_iteration_output,
|
||||
parallel_mode_run_id=event.parallel_mode_run_id,
|
||||
duration=event.duration,
|
||||
)
|
||||
)
|
||||
elif isinstance(event, (IterationRunSucceededEvent | IterationRunFailedEvent)):
|
||||
|
|
|
@ -111,6 +111,7 @@ class QueueIterationNextEvent(AppQueueEvent):
|
|||
"""iteratoin run in parallel mode run id"""
|
||||
node_run_index: int
|
||||
output: Optional[Any] = None # output for the current iteration
|
||||
duration: Optional[float] = None
|
||||
|
||||
@field_validator("output", mode="before")
|
||||
@classmethod
|
||||
|
@ -307,6 +308,8 @@ class QueueNodeSucceededEvent(AppQueueEvent):
|
|||
execution_metadata: Optional[dict[NodeRunMetadataKey, Any]] = None
|
||||
|
||||
error: Optional[str] = None
|
||||
"""single iteration duration map"""
|
||||
iteration_duration_map: Optional[dict[str, float]] = None
|
||||
|
||||
|
||||
class QueueNodeInIterationFailedEvent(AppQueueEvent):
|
||||
|
|
|
@ -434,6 +434,7 @@ class IterationNodeNextStreamResponse(StreamResponse):
|
|||
parallel_id: Optional[str] = None
|
||||
parallel_start_node_id: Optional[str] = None
|
||||
parallel_mode_run_id: Optional[str] = None
|
||||
duration: Optional[float] = None
|
||||
|
||||
event: StreamEvent = StreamEvent.ITERATION_NEXT
|
||||
workflow_run_id: str
|
||||
|
|
|
@ -624,6 +624,7 @@ class WorkflowCycleManage:
|
|||
parallel_id=event.parallel_id,
|
||||
parallel_start_node_id=event.parallel_start_node_id,
|
||||
parallel_mode_run_id=event.parallel_mode_run_id,
|
||||
duration=event.duration,
|
||||
),
|
||||
)
|
||||
|
||||
|
|
|
@ -0,0 +1,95 @@
|
|||
model: Qwen2.5-72B-Instruct
|
||||
label:
|
||||
zh_Hans: Qwen2.5-72B-Instruct
|
||||
en_US: Qwen2.5-72B-Instruct
|
||||
model_type: llm
|
||||
features:
|
||||
- agent-thought
|
||||
- tool-call
|
||||
- stream-tool-call
|
||||
model_properties:
|
||||
mode: chat
|
||||
context_size: 32768
|
||||
parameter_rules:
|
||||
- name: max_tokens
|
||||
use_template: max_tokens
|
||||
label:
|
||||
en_US: "Max Tokens"
|
||||
zh_Hans: "最大Token数"
|
||||
type: int
|
||||
default: 512
|
||||
min: 1
|
||||
required: true
|
||||
help:
|
||||
en_US: "The maximum number of tokens that can be generated by the model varies depending on the model."
|
||||
zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。"
|
||||
|
||||
- name: temperature
|
||||
use_template: temperature
|
||||
label:
|
||||
en_US: "Temperature"
|
||||
zh_Hans: "采样温度"
|
||||
type: float
|
||||
default: 0.7
|
||||
min: 0.0
|
||||
max: 1.0
|
||||
precision: 1
|
||||
required: true
|
||||
help:
|
||||
en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time."
|
||||
zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。"
|
||||
|
||||
- name: top_p
|
||||
use_template: top_p
|
||||
label:
|
||||
en_US: "Top P"
|
||||
zh_Hans: "Top P"
|
||||
type: float
|
||||
default: 0.7
|
||||
min: 0.0
|
||||
max: 1.0
|
||||
precision: 1
|
||||
required: true
|
||||
help:
|
||||
en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time."
|
||||
zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。"
|
||||
|
||||
- name: top_k
|
||||
use_template: top_k
|
||||
label:
|
||||
en_US: "Top K"
|
||||
zh_Hans: "Top K"
|
||||
type: int
|
||||
default: 50
|
||||
min: 0
|
||||
max: 100
|
||||
required: true
|
||||
help:
|
||||
en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be."
|
||||
zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。"
|
||||
|
||||
- name: frequency_penalty
|
||||
use_template: frequency_penalty
|
||||
label:
|
||||
en_US: "Frequency Penalty"
|
||||
zh_Hans: "频率惩罚"
|
||||
type: float
|
||||
default: 0
|
||||
min: -1.0
|
||||
max: 1.0
|
||||
precision: 1
|
||||
required: false
|
||||
help:
|
||||
en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation."
|
||||
zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。"
|
||||
|
||||
- name: user
|
||||
use_template: text
|
||||
label:
|
||||
en_US: "User"
|
||||
zh_Hans: "用户"
|
||||
type: string
|
||||
required: false
|
||||
help:
|
||||
en_US: "Used to track and differentiate conversation requests from different users."
|
||||
zh_Hans: "用于追踪和区分不同用户的对话请求。"
|
|
@ -1,3 +1,4 @@
|
|||
- Qwen2.5-72B-Instruct
|
||||
- Qwen2-7B-Instruct
|
||||
- Qwen2-72B-Instruct
|
||||
- Yi-1.5-34B-Chat
|
||||
|
|
|
@ -6,6 +6,7 @@ from core.model_runtime.entities.message_entities import (
|
|||
PromptMessage,
|
||||
PromptMessageTool,
|
||||
)
|
||||
from core.model_runtime.entities.model_entities import ModelFeature
|
||||
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel
|
||||
|
||||
|
||||
|
@ -28,14 +29,13 @@ class GiteeAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
|
|||
user: Optional[str] = None,
|
||||
) -> Union[LLMResult, Generator]:
|
||||
self._add_custom_parameters(credentials, model, model_parameters)
|
||||
return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream)
|
||||
return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
|
||||
|
||||
def validate_credentials(self, model: str, credentials: dict) -> None:
|
||||
self._add_custom_parameters(credentials, model, None)
|
||||
super().validate_credentials(model, credentials)
|
||||
|
||||
@staticmethod
|
||||
def _add_custom_parameters(credentials: dict, model: str, model_parameters: dict) -> None:
|
||||
def _add_custom_parameters(self, credentials: dict, model: str, model_parameters: dict) -> None:
|
||||
if model is None:
|
||||
model = "bge-large-zh-v1.5"
|
||||
|
||||
|
@ -45,3 +45,7 @@ class GiteeAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
|
|||
credentials["mode"] = LLMMode.COMPLETION.value
|
||||
else:
|
||||
credentials["mode"] = LLMMode.CHAT.value
|
||||
|
||||
schema = self.get_model_schema(model, credentials)
|
||||
if ModelFeature.TOOL_CALL in schema.features or ModelFeature.MULTI_TOOL_CALL in schema.features:
|
||||
credentials["function_calling_type"] = "tool_call"
|
||||
|
|
|
@ -178,6 +178,7 @@ class ElasticSearchVector(BaseVector):
|
|||
Field.VECTOR.value: { # Make sure the dimension is correct here
|
||||
"type": "dense_vector",
|
||||
"dims": dim,
|
||||
"index": True,
|
||||
"similarity": "cosine",
|
||||
},
|
||||
Field.METADATA_KEY.value: {
|
||||
|
|
|
@ -50,9 +50,9 @@ class WordExtractor(BaseExtractor):
|
|||
|
||||
self.web_path = self.file_path
|
||||
# TODO: use a better way to handle the file
|
||||
with tempfile.NamedTemporaryFile(delete=False) as self.temp_file:
|
||||
self.temp_file.write(r.content)
|
||||
self.file_path = self.temp_file.name
|
||||
self.temp_file = tempfile.NamedTemporaryFile() # noqa: SIM115
|
||||
self.temp_file.write(r.content)
|
||||
self.file_path = self.temp_file.name
|
||||
elif not os.path.isfile(self.file_path):
|
||||
raise ValueError(f"File path {self.file_path} is not a valid file or url")
|
||||
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<svg width="800px" height="800px" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
|
||||
<g>
|
||||
<path fill="none" d="M0 0h24v24H0z"/>
|
||||
<path d="M16 17v-1h-3v-3h3v2h2v2h-1v2h-2v2h-2v-3h2v-1h1zm5 4h-4v-2h2v-2h2v4zM3 3h8v8H3V3zm2 2v4h4V5H5zm8-2h8v8h-8V3zm2 2v4h4V5h-4zM3 13h8v8H3v-8zm2 2v4h4v-4H5zm13-2h3v2h-3v-2zM6 6h2v2H6V6zm0 10h2v2H6v-2zM16 6h2v2h-2V6z"/>
|
||||
</g>
|
||||
</svg>
|
|
@ -1,8 +0,0 @@
|
|||
from typing import Any
|
||||
|
||||
from core.tools.builtin_tool.provider import BuiltinToolProviderController
|
||||
|
||||
|
||||
class QRCodeProvider(BuiltinToolProviderController):
|
||||
def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None:
|
||||
pass
|
|
@ -1,14 +0,0 @@
|
|||
identity:
|
||||
author: Bowen Liang
|
||||
name: qrcode
|
||||
label:
|
||||
en_US: QRCode
|
||||
zh_Hans: 二维码工具
|
||||
pt_BR: QRCode
|
||||
description:
|
||||
en_US: A tool for generating QR code (quick-response code) image.
|
||||
zh_Hans: 一个二维码工具
|
||||
pt_BR: A tool for generating QR code (quick-response code) image.
|
||||
icon: icon.svg
|
||||
tags:
|
||||
- utilities
|
|
@ -1,70 +0,0 @@
|
|||
import io
|
||||
import logging
|
||||
from typing import Any, Union
|
||||
|
||||
from qrcode.constants import ERROR_CORRECT_H, ERROR_CORRECT_L, ERROR_CORRECT_M, ERROR_CORRECT_Q
|
||||
from qrcode.image.base import BaseImage
|
||||
from qrcode.image.pure import PyPNGImage
|
||||
from qrcode.main import QRCode
|
||||
|
||||
from core.tools.builtin_tool.tool import BuiltinTool
|
||||
from core.tools.entities.tool_entities import ToolInvokeMessage
|
||||
|
||||
|
||||
class QRCodeGeneratorTool(BuiltinTool):
|
||||
error_correction_levels: dict[str, int] = {
|
||||
"L": ERROR_CORRECT_L, # <=7%
|
||||
"M": ERROR_CORRECT_M, # <=15%
|
||||
"Q": ERROR_CORRECT_Q, # <=25%
|
||||
"H": ERROR_CORRECT_H, # <=30%
|
||||
}
|
||||
|
||||
def _invoke(
|
||||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
invoke tools
|
||||
"""
|
||||
# get text content
|
||||
content = tool_parameters.get("content", "")
|
||||
if not content:
|
||||
return self.create_text_message("Invalid parameter content")
|
||||
|
||||
# get border size
|
||||
border = tool_parameters.get("border", 0)
|
||||
if border < 0 or border > 100:
|
||||
return self.create_text_message("Invalid parameter border")
|
||||
|
||||
# get error_correction
|
||||
error_correction = tool_parameters.get("error_correction", "")
|
||||
if error_correction not in self.error_correction_levels:
|
||||
return self.create_text_message("Invalid parameter error_correction")
|
||||
|
||||
try:
|
||||
image = self._generate_qrcode(content, border, error_correction)
|
||||
image_bytes = self._image_to_byte_array(image)
|
||||
return self.create_blob_message(
|
||||
blob=image_bytes, meta={"mime_type": "image/png"}, save_as=self.VariableKey.IMAGE.value
|
||||
)
|
||||
except Exception:
|
||||
logging.exception(f"Failed to generate QR code for content: {content}")
|
||||
return self.create_text_message("Failed to generate QR code")
|
||||
|
||||
def _generate_qrcode(self, content: str, border: int, error_correction: str) -> BaseImage:
|
||||
qr = QRCode(
|
||||
image_factory=PyPNGImage,
|
||||
error_correction=self.error_correction_levels.get(error_correction),
|
||||
border=border,
|
||||
)
|
||||
qr.add_data(data=content)
|
||||
qr.make(fit=True)
|
||||
img = qr.make_image()
|
||||
return img
|
||||
|
||||
@staticmethod
|
||||
def _image_to_byte_array(image: BaseImage) -> bytes:
|
||||
byte_stream = io.BytesIO()
|
||||
image.save(byte_stream)
|
||||
return byte_stream.getvalue()
|
|
@ -1,76 +0,0 @@
|
|||
identity:
|
||||
name: qrcode_generator
|
||||
author: Bowen Liang
|
||||
label:
|
||||
en_US: Generate QR Code
|
||||
zh_Hans: 生成二维码
|
||||
pt_BR: Generate QR Code
|
||||
description:
|
||||
human:
|
||||
en_US: A tool for generating QR code image
|
||||
zh_Hans: 一个用于生成二维码的工具
|
||||
pt_BR: A tool for generating QR code image
|
||||
llm: A tool for generating QR code image
|
||||
parameters:
|
||||
- name: content
|
||||
type: string
|
||||
required: true
|
||||
label:
|
||||
en_US: content text for QR code
|
||||
zh_Hans: 二维码文本内容
|
||||
pt_BR: content text for QR code
|
||||
human_description:
|
||||
en_US: content text for QR code
|
||||
zh_Hans: 二维码文本内容
|
||||
pt_BR: 二维码文本内容
|
||||
form: llm
|
||||
- name: error_correction
|
||||
type: select
|
||||
required: true
|
||||
default: M
|
||||
label:
|
||||
en_US: Error Correction
|
||||
zh_Hans: 容错等级
|
||||
pt_BR: Error Correction
|
||||
human_description:
|
||||
en_US: Error Correction in L, M, Q or H, from low to high, the bigger size of generated QR code with the better error correction effect
|
||||
zh_Hans: 容错等级,可设置为低、中、偏高或高,从低到高,生成的二维码越大且容错效果越好
|
||||
pt_BR: Error Correction in L, M, Q or H, from low to high, the bigger size of generated QR code with the better error correction effect
|
||||
options:
|
||||
- value: L
|
||||
label:
|
||||
en_US: Low
|
||||
zh_Hans: 低
|
||||
pt_BR: Low
|
||||
- value: M
|
||||
label:
|
||||
en_US: Medium
|
||||
zh_Hans: 中
|
||||
pt_BR: Medium
|
||||
- value: Q
|
||||
label:
|
||||
en_US: Quartile
|
||||
zh_Hans: 偏高
|
||||
pt_BR: Quartile
|
||||
- value: H
|
||||
label:
|
||||
en_US: High
|
||||
zh_Hans: 高
|
||||
pt_BR: High
|
||||
form: form
|
||||
- name: border
|
||||
type: number
|
||||
required: true
|
||||
default: 2
|
||||
min: 0
|
||||
max: 100
|
||||
label:
|
||||
en_US: border size
|
||||
zh_Hans: 边框粗细
|
||||
pt_BR: border size
|
||||
human_description:
|
||||
en_US: border size(default to 2)
|
||||
zh_Hans: 边框粗细的格数(默认为2)
|
||||
pt_BR: border size(default to 2)
|
||||
llm: border size, default to 2
|
||||
form: form
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from pytz import timezone as pytz_timezone
|
||||
|
||||
|
@ -12,6 +12,9 @@ class CurrentTimeTool(BuiltinTool):
|
|||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
message_id: Optional[str] = None,
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
invoke tools
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import datetime
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import pytz
|
||||
|
||||
|
@ -13,6 +13,9 @@ class LocaltimeToTimestampTool(BuiltinTool):
|
|||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
message_id: Optional[str] = None,
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
Convert localtime to timestamp
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import datetime
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import pytz
|
||||
|
||||
|
@ -13,6 +13,9 @@ class TimestampToLocaltimeTool(BuiltinTool):
|
|||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
message_id: Optional[str] = None,
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
Convert timestamp to localtime
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import datetime
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import pytz
|
||||
|
||||
|
@ -13,6 +13,9 @@ class TimezoneConversionTool(BuiltinTool):
|
|||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
message_id: Optional[str] = None,
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
Convert time to equivalent time zone
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import calendar
|
||||
from datetime import datetime
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from core.tools.builtin_tool.tool import BuiltinTool
|
||||
from core.tools.entities.tool_entities import ToolInvokeMessage
|
||||
|
@ -11,6 +11,9 @@ class WeekdayTool(BuiltinTool):
|
|||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
message_id: Optional[str] = None,
|
||||
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
|
||||
"""
|
||||
Calculate the day of the week for a given date
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from typing import Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
from core.model_runtime.utils.encoders import jsonable_encoder
|
||||
from core.tools.__base.tool import ToolParameter
|
||||
|
@ -37,6 +37,11 @@ class ToolProviderApiEntity(BaseModel):
|
|||
tools: list[ToolApiEntity] = Field(default_factory=list)
|
||||
labels: list[str] = Field(default_factory=list)
|
||||
|
||||
@field_validator("tools", mode="before")
|
||||
@classmethod
|
||||
def convert_none_to_empty_list(cls, v):
|
||||
return v if v is not None else []
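The new `convert_none_to_empty_list` hook runs in `mode="before"`, so an explicit `tools=None` is normalized to an empty list instead of failing the `list[...]` type check. A self-contained Pydantic v2 sketch of the same pattern (a simplified model, not the real ToolProviderApiEntity):

```python
from pydantic import BaseModel, Field, field_validator


class ProviderEntity(BaseModel):
    tools: list[str] = Field(default_factory=list)

    @field_validator("tools", mode="before")
    @classmethod
    def convert_none_to_empty_list(cls, v):
        # Runs before type coercion: treat an explicit None as "no tools".
        return v if v is not None else []


print(ProviderEntity(tools=None).tools)  # -> []
print(ProviderEntity().tools)            # -> []
```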
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
# -------------
|
||||
# overwrite tool parameter types for temp fix
|
||||
|
|
|
@ -90,14 +90,16 @@ class ToolEngine:
|
|||
conversation_id=message.conversation_id,
|
||||
)
|
||||
|
||||
message_list = list(messages)
|
||||
|
||||
# extract binary data from tool invoke message
|
||||
binary_files = ToolEngine._extract_tool_response_binary(messages)
|
||||
binary_files = ToolEngine._extract_tool_response_binary_and_text(message_list)
|
||||
# create message file
|
||||
message_files = ToolEngine._create_message_files(
|
||||
tool_messages=binary_files, agent_message=message, invoke_from=invoke_from, user_id=user_id
|
||||
)
|
||||
|
||||
plain_text = ToolEngine._convert_tool_response_to_str(messages)
|
||||
plain_text = ToolEngine._convert_tool_response_to_str(message_list)
|
||||
|
||||
meta = invocation_meta_dict["meta"]
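Materializing the generator into `message_list` before it is consumed twice is the core of this hunk: a generator yields its items only once, so the second helper would otherwise see an empty stream. A minimal illustration of the pitfall the change avoids:

```python
def produce():
    yield "binary-part"
    yield "text-part"


gen = produce()
first_pass = list(gen)   # consumes the generator
second_pass = list(gen)  # already exhausted
print(first_pass, second_pass)  # ['binary-part', 'text-part'] []

# Materializing once up front lets both extraction steps see every message.
messages = list(produce())
print(list(messages), list(messages))  # both passes see the full list
```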
|
||||
|
||||
|
@ -219,7 +221,7 @@ class ToolEngine:
|
|||
yield meta
|
||||
|
||||
@staticmethod
|
||||
def _convert_tool_response_to_str(tool_response: Generator[ToolInvokeMessage, None, None]) -> str:
|
||||
def _convert_tool_response_to_str(tool_response: list[ToolInvokeMessage]) -> str:
|
||||
"""
|
||||
Handle tool response
|
||||
"""
|
||||
|
@ -246,8 +248,8 @@ class ToolEngine:
|
|||
return result
|
||||
|
||||
@staticmethod
|
||||
def _extract_tool_response_binary(
|
||||
tool_response: Generator[ToolInvokeMessage, None, None],
|
||||
def _extract_tool_response_binary_and_text(
|
||||
tool_response: list[ToolInvokeMessage],
|
||||
) -> Generator[ToolInvokeMessageBinary, None, None]:
|
||||
"""
|
||||
Extract tool response binary
|
||||
|
|
|
@ -698,7 +698,11 @@ class ToolManager:
|
|||
"""
|
||||
get api provider
|
||||
"""
|
||||
provider_obj: ApiToolProvider | None = (
|
||||
"""
|
||||
get tool provider
|
||||
"""
|
||||
provider_name = provider
|
||||
provider_obj: ApiToolProvider = (
|
||||
db.session.query(ApiToolProvider)
|
||||
.filter(
|
||||
ApiToolProvider.tenant_id == tenant_id,
|
||||
|
@ -708,7 +712,7 @@ class ToolManager:
|
|||
)
|
||||
|
||||
if provider_obj is None:
|
||||
raise ValueError(f"you have not added provider {provider}")
|
||||
raise ValueError(f"you have not added provider {provider_name}")
|
||||
|
||||
try:
|
||||
credentials = json.loads(provider_obj.credentials_str) or {}
|
||||
|
|
17 api/core/tools/utils/rag_web_reader.py (new file)
|
@@ -0,0 +1,17 @@
import re


def get_image_upload_file_ids(content):
    pattern = r"!\[image\]\((http?://.*?(file-preview|image-preview))\)"
    matches = re.findall(pattern, content)
    image_upload_file_ids = []
    for match in matches:
        if match[1] == "file-preview":
            content_pattern = r"files/([^/]+)/file-preview"
        else:
            content_pattern = r"files/([^/]+)/image-preview"
        content_match = re.search(content_pattern, match[0])
        if content_match:
            image_upload_file_id = content_match.group(1)
            image_upload_file_ids.append(image_upload_file_id)
    return image_upload_file_ids
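A short usage sketch for the new helper (the markdown string and file ids below are invented for illustration; in real use the content comes from the RAG web-reader output, and the import path assumes the `api` package layout shown in this diff):

```python
from core.tools.utils.rag_web_reader import get_image_upload_file_ids

content = (
    "Intro text\n"
    "![image](http://example.com/files/abc123/image-preview)\n"
    "![image](http://example.com/files/def456/file-preview)\n"
)

# Extracts the upload-file ids embedded in the preview URLs.
print(get_image_upload_file_ids(content))  # -> ['abc123', 'def456']
```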
|
|
@ -17,6 +17,7 @@ from .segments import (
|
|||
from .types import SegmentType
|
||||
from .variables import (
|
||||
ArrayAnyVariable,
|
||||
ArrayFileVariable,
|
||||
ArrayNumberVariable,
|
||||
ArrayObjectVariable,
|
||||
ArrayStringVariable,
|
||||
|
@ -58,4 +59,5 @@ __all__ = [
|
|||
"ArrayStringSegment",
|
||||
"FileSegment",
|
||||
"FileVariable",
|
||||
"ArrayFileVariable",
|
||||
]
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
from collections.abc import Sequence
|
||||
from uuid import uuid4
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from core.helper import encrypter
|
||||
|
||||
from .segments import (
|
||||
ArrayAnySegment,
|
||||
ArrayFileSegment,
|
||||
ArrayNumberSegment,
|
||||
ArrayObjectSegment,
|
||||
ArrayStringSegment,
|
||||
|
@ -24,11 +28,12 @@ class Variable(Segment):
|
|||
"""
|
||||
|
||||
id: str = Field(
|
||||
default="",
|
||||
description="Unique identity for variable. It's only used by environment variables now.",
|
||||
default=lambda _: str(uuid4()),
|
||||
description="Unique identity for variable.",
|
||||
)
|
||||
name: str
|
||||
description: str = Field(default="", description="Description of the variable.")
|
||||
selector: Sequence[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class StringVariable(StringSegment, Variable):
|
||||
|
@ -78,3 +83,7 @@ class NoneVariable(NoneSegment, Variable):
|
|||
|
||||
class FileVariable(FileSegment, Variable):
|
||||
pass
|
||||
|
||||
|
||||
class ArrayFileVariable(ArrayFileSegment, Variable):
|
||||
pass
|
||||
|
|
|
@ -24,6 +24,7 @@ class NodeRunMetadataKey(str, Enum):
|
|||
PARENT_PARALLEL_ID = "parent_parallel_id"
|
||||
PARENT_PARALLEL_START_NODE_ID = "parent_parallel_start_node_id"
|
||||
PARALLEL_MODE_RUN_ID = "parallel_mode_run_id"
|
||||
ITERATION_DURATION_MAP = "iteration_duration_map" # single iteration duration if iteration node runs
|
||||
|
||||
|
||||
class NodeRunResult(BaseModel):
|
||||
|
|
|
@ -95,13 +95,16 @@ class VariablePool(BaseModel):
|
|||
if len(selector) < 2:
|
||||
raise ValueError("Invalid selector")
|
||||
|
||||
if isinstance(value, Variable):
|
||||
variable = value
|
||||
if isinstance(value, Segment):
|
||||
v = value
|
||||
variable = variable_factory.segment_to_variable(segment=value, selector=selector)
|
||||
else:
|
||||
v = variable_factory.build_segment(value)
|
||||
segment = variable_factory.build_segment(value)
|
||||
variable = variable_factory.segment_to_variable(segment=segment, selector=selector)
|
||||
|
||||
hash_key = hash(tuple(selector[1:]))
|
||||
self.variable_dictionary[selector[0]][hash_key] = v
|
||||
self.variable_dictionary[selector[0]][hash_key] = variable
|
||||
|
||||
def get(self, selector: Sequence[str], /) -> Segment | None:
|
||||
"""
|
||||
|
|
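After this change every value stored in the pool is wrapped as a Variable: a Segment is converted directly, anything else is first built into a segment and then converted. A rough, self-contained sketch of that flow — the class and factory names follow the diff, but their bodies here are simplified stand-ins, not the real implementations:

```python
from dataclasses import dataclass
from typing import Any, Sequence


@dataclass
class Segment:
    value: Any


@dataclass
class Variable(Segment):
    selector: Sequence[str] = ()


def build_segment(value: Any) -> Segment:
    return value if isinstance(value, Segment) else Segment(value)


def segment_to_variable(*, segment: Segment, selector: Sequence[str]) -> Variable:
    return Variable(value=segment.value, selector=selector)


def add(pool: dict, selector: Sequence[str], value: Any) -> None:
    # Mirrors the new VariablePool.add: the pool only ever holds Variables.
    segment = value if isinstance(value, Segment) else build_segment(value)
    pool[tuple(selector)] = segment_to_variable(segment=segment, selector=selector)


pool: dict = {}
add(pool, ("node_1", "output"), "hello")
print(pool[("node_1", "output")])
```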
|
@ -148,6 +148,7 @@ class IterationRunStartedEvent(BaseIterationEvent):
|
|||
class IterationRunNextEvent(BaseIterationEvent):
|
||||
index: int = Field(..., description="index")
|
||||
pre_iteration_output: Optional[Any] = Field(None, description="pre iteration output")
|
||||
duration: Optional[float] = Field(None, description="duration")
|
||||
|
||||
|
||||
class IterationRunSucceededEvent(BaseIterationEvent):
|
||||
|
@ -156,6 +157,7 @@ class IterationRunSucceededEvent(BaseIterationEvent):
|
|||
outputs: Optional[dict[str, Any]] = None
|
||||
metadata: Optional[dict[str, Any]] = None
|
||||
steps: int = 0
|
||||
iteration_duration_map: Optional[dict[str, float]] = None
|
||||
|
||||
|
||||
class IterationRunFailedEvent(BaseIterationEvent):
|
||||
|
|
|
@ -143,14 +143,14 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)
|
|||
|
||||
def _extract_text_from_plain_text(file_content: bytes) -> str:
|
||||
try:
|
||||
return file_content.decode("utf-8")
|
||||
return file_content.decode("utf-8", "ignore")
|
||||
except UnicodeDecodeError as e:
|
||||
raise TextExtractionError("Failed to decode plain text file") from e
|
||||
|
||||
|
||||
def _extract_text_from_json(file_content: bytes) -> str:
|
||||
try:
|
||||
json_data = json.loads(file_content.decode("utf-8"))
|
||||
json_data = json.loads(file_content.decode("utf-8", "ignore"))
|
||||
return json.dumps(json_data, indent=2, ensure_ascii=False)
|
||||
except (UnicodeDecodeError, json.JSONDecodeError) as e:
|
||||
raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e
|
||||
|
@ -159,7 +159,7 @@ def _extract_text_from_json(file_content: bytes) -> str:
|
|||
def _extract_text_from_yaml(file_content: bytes) -> str:
|
||||
"""Extract the content from yaml file"""
|
||||
try:
|
||||
yaml_data = yaml.safe_load_all(file_content.decode("utf-8"))
|
||||
yaml_data = yaml.safe_load_all(file_content.decode("utf-8", "ignore"))
|
||||
return yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False)
|
||||
except (UnicodeDecodeError, yaml.YAMLError) as e:
|
||||
raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e
|
||||
|
@ -217,7 +217,7 @@ def _extract_text_from_file(file: File):
|
|||
|
||||
def _extract_text_from_csv(file_content: bytes) -> str:
|
||||
try:
|
||||
csv_file = io.StringIO(file_content.decode("utf-8"))
|
||||
csv_file = io.StringIO(file_content.decode("utf-8", "ignore"))
|
||||
csv_reader = csv.reader(csv_file)
|
||||
rows = list(csv_reader)
|
||||
|
||||
|
|
|
@@ -156,6 +156,7 @@ class IterationNode(BaseNode[IterationNodeData]):
index=0,
pre_iteration_output=None,
)
iter_run_map: dict[str, float] = {}
outputs: list[Any] = [None] * len(iterator_list_value)
try:
if self.node_data.is_parallel:

@@ -175,6 +176,7 @@ class IterationNode(BaseNode[IterationNodeData]):
iteration_graph,
index,
item,
iter_run_map,
)
future.add_done_callback(thread_pool.task_done_callback)
futures.append(future)

@@ -213,6 +215,7 @@ class IterationNode(BaseNode[IterationNodeData]):
start_at,
graph_engine,
iteration_graph,
iter_run_map,
)
if self.node_data.error_handle_mode == ErrorHandleMode.REMOVE_ABNORMAL_OUTPUT:
outputs = [output for output in outputs if output is not None]

@@ -230,7 +233,9 @@ class IterationNode(BaseNode[IterationNodeData]):

yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={"output": jsonable_encoder(outputs)}
status=WorkflowNodeExecutionStatus.SUCCEEDED,
outputs={"output": jsonable_encoder(outputs)},
metadata={NodeRunMetadataKey.ITERATION_DURATION_MAP: iter_run_map},
)
)
except IterationNodeError as e:

@@ -356,15 +361,19 @@ class IterationNode(BaseNode[IterationNodeData]):
start_at: datetime,
graph_engine: "GraphEngine",
iteration_graph: Graph,
iter_run_map: dict[str, float],
parallel_mode_run_id: Optional[str] = None,
) -> Generator[NodeEvent | InNodeEvent, None, None]:
"""
run single iteration
"""
iter_start_at = datetime.now(timezone.utc).replace(tzinfo=None)

try:
rst = graph_engine.run()
# get current iteration index
current_index = variable_pool.get([self.node_id, "index"]).value
iteration_run_id = parallel_mode_run_id if parallel_mode_run_id is not None else f"{current_index}"
next_index = int(current_index) + 1

if current_index is None:

@@ -431,6 +440,8 @@ class IterationNode(BaseNode[IterationNodeData]):
variable_pool.add([self.node_id, "index"], next_index)
if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,
iteration_node_id=self.node_id,

@@ -439,6 +450,7 @@ class IterationNode(BaseNode[IterationNodeData]):
index=next_index,
parallel_mode_run_id=parallel_mode_run_id,
pre_iteration_output=None,
duration=duration,
)
return
elif self.node_data.error_handle_mode == ErrorHandleMode.REMOVE_ABNORMAL_OUTPUT:

@@ -449,6 +461,8 @@ class IterationNode(BaseNode[IterationNodeData]):

if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,
iteration_node_id=self.node_id,

@@ -457,6 +471,7 @@ class IterationNode(BaseNode[IterationNodeData]):
index=next_index,
parallel_mode_run_id=parallel_mode_run_id,
pre_iteration_output=None,
duration=duration,
)
return
elif self.node_data.error_handle_mode == ErrorHandleMode.TERMINATED:

@@ -474,7 +489,10 @@ class IterationNode(BaseNode[IterationNodeData]):
)
yield metadata_event

current_iteration_output = variable_pool.get(self.node_data.output_selector).value
current_output_segment = variable_pool.get(self.node_data.output_selector)
if current_output_segment is None:
raise IterationNodeError("iteration output selector not found")
current_iteration_output = current_output_segment.value
outputs[current_index] = current_iteration_output
# remove all nodes outputs from variable pool
for node_id in iteration_graph.node_ids:

@@ -485,6 +503,8 @@ class IterationNode(BaseNode[IterationNodeData]):

if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,
iteration_node_id=self.node_id,

@@ -493,6 +513,7 @@ class IterationNode(BaseNode[IterationNodeData]):
index=next_index,
parallel_mode_run_id=parallel_mode_run_id,
pre_iteration_output=jsonable_encoder(current_iteration_output) if current_iteration_output else None,
duration=duration,
)

except IterationNodeError as e:

@@ -528,6 +549,7 @@ class IterationNode(BaseNode[IterationNodeData]):
iteration_graph: Graph,
index: int,
item: Any,
iter_run_map: dict[str, float],
) -> Generator[NodeEvent | InNodeEvent, None, None]:
"""
run single iteration in parallel mode

@@ -546,6 +568,7 @@ class IterationNode(BaseNode[IterationNodeData]):
start_at=start_at,
graph_engine=graph_engine_copy,
iteration_graph=iteration_graph,
iter_run_map=iter_run_map,
parallel_mode_run_id=parallel_mode_run_id,
):
q.put(event)
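The iteration-node edits above mainly thread an iter_run_map through every iteration run so per-iteration durations land in the node's run metadata. A simplified sketch of that bookkeeping, using stand-in names (run_iterations, process_item) rather than the real node internals:

from datetime import datetime, timezone
from typing import Any, Callable

def run_iterations(items: list[Any], process_item: Callable[[Any], Any]) -> tuple[list[Any], dict[str, float]]:
    iter_run_map: dict[str, float] = {}   # iteration run id -> duration in seconds
    outputs: list[Any] = [None] * len(items)
    for index, item in enumerate(items):
        iter_start_at = datetime.now(timezone.utc).replace(tzinfo=None)
        outputs[index] = process_item(item)
        duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
        iter_run_map[f"{index}"] = duration
    return outputs, iter_run_map

outputs, durations = run_iterations([1, 2, 3], lambda x: x * 2)
print(outputs, durations)  # [2, 4, 6] and a map like {'0': ..., '1': ..., '2': ...}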
@@ -59,4 +59,4 @@ class ListOperatorNodeData(BaseNodeData):
filter_by: FilterBy
order_by: OrderBy
limit: Limit
extract_by: ExtractConfig
extract_by: ExtractConfig = Field(default_factory=ExtractConfig)
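Giving extract_by a default via Field(default_factory=ExtractConfig) lets older payloads that omit the field still validate. A self-contained Pydantic sketch of the idea; ExampleConfig and ExampleNodeData are made-up names, not the real Dify models:

from pydantic import BaseModel, Field

class ExampleConfig(BaseModel):
    enabled: bool = False
    serial: str = "comma"

class ExampleNodeData(BaseModel):
    # default_factory builds a fresh config when the field is absent,
    # avoiding both validation errors and a shared mutable default.
    extract_by: ExampleConfig = Field(default_factory=ExampleConfig)

print(ExampleNodeData())                              # extract_by falls back to the factory default
print(ExampleNodeData(extract_by={"enabled": True}))  # explicit values still win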
@@ -21,7 +21,7 @@ if [[ "${MODE}" == "worker" ]]; then
fi

exec celery -A app.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION --loglevel ${LOG_LEVEL} \
-Q ${CELERY_QUEUES:-dataset,generation,mail,ops_trace,app_deletion}
-Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion}

elif [[ "${MODE}" == "beat" ]]; then
exec celery -A app.celery beat --loglevel ${LOG_LEVEL}
@@ -46,7 +46,6 @@ def init_app(app: Flask) -> Celery:
broker_connection_retry_on_startup=True,
worker_log_format=dify_config.LOG_FORMAT,
worker_task_log_format=dify_config.LOG_FORMAT,
worker_logfile=dify_config.LOG_FILE,
worker_hijack_root_logger=False,
timezone=pytz.timezone(dify_config.LOG_TZ),
)

@@ -56,6 +55,11 @@ def init_app(app: Flask) -> Celery:
broker_use_ssl=ssl_options,  # Add the SSL options to the broker configuration
)

if dify_config.LOG_FILE:
celery_app.conf.update(
worker_logfile=dify_config.LOG_FILE,
)

celery_app.set_default()
app.extensions["celery"] = celery_app
@@ -9,19 +9,21 @@ from configs import dify_config

def init_app(app: Flask):
log_handlers = None
log_handlers = []
log_file = dify_config.LOG_FILE
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
log_handlers = [
log_handlers.append(
RotatingFileHandler(
filename=log_file,
maxBytes=dify_config.LOG_FILE_MAX_SIZE * 1024 * 1024,
backupCount=dify_config.LOG_FILE_BACKUP_COUNT,
),
logging.StreamHandler(sys.stdout),
]
)
)

# Always add StreamHandler to log to console
log_handlers.append(logging.StreamHandler(sys.stdout))

logging.basicConfig(
level=dify_config.LOG_LEVEL,
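With this change the API process always logs to stdout and only adds a rotating file handler when LOG_FILE is configured. A standalone sketch of that setup; the path, size and backup count here are placeholder values:

import logging
import os
import sys
from logging.handlers import RotatingFileHandler

log_file = os.environ.get("LOG_FILE")  # e.g. "/var/log/dify/app.log"; may be unset
log_handlers: list[logging.Handler] = []

if log_file:
    log_dir = os.path.dirname(log_file)
    if log_dir:  # guard for bare filenames; the real code assumes a directory component
        os.makedirs(log_dir, exist_ok=True)
    log_handlers.append(
        RotatingFileHandler(filename=log_file, maxBytes=20 * 1024 * 1024, backupCount=5)
    )

# Always add a StreamHandler so logs reach the console either way.
log_handlers.append(logging.StreamHandler(sys.stdout))

logging.basicConfig(level=logging.INFO, handlers=log_handlers)
logging.getLogger(__name__).info("logging configured")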
@@ -180,6 +180,20 @@ def _get_remote_file_info(url: str):
return mime_type, filename, file_size


def _get_file_type_by_mimetype(mime_type: str) -> FileType:
if "image" in mime_type:
file_type = FileType.IMAGE
elif "video" in mime_type:
file_type = FileType.VIDEO
elif "audio" in mime_type:
file_type = FileType.AUDIO
elif "text" in mime_type or "pdf" in mime_type:
file_type = FileType.DOCUMENT
else:
file_type = FileType.CUSTOM
return file_type


def _build_from_tool_file(
*,
mapping: Mapping[str, Any],

@@ -199,12 +213,13 @@ def _build_from_tool_file(
raise ValueError(f"ToolFile {mapping.get('tool_file_id')} not found")

extension = "." + tool_file.file_key.split(".")[-1] if "." in tool_file.file_key else ".bin"
file_type = mapping.get("type", _get_file_type_by_mimetype(tool_file.mimetype))

return File(
id=mapping.get("id"),
tenant_id=tenant_id,
filename=tool_file.name,
type=FileType.value_of(mapping.get("type")),
type=file_type,
transfer_method=transfer_method,
remote_url=tool_file.original_url,
related_id=tool_file.id,
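The net effect of the tool-file change is that an explicit type in the mapping still wins, and otherwise the type is inferred from the stored mimetype. A compact sketch of that resolution order with an illustrative enum instead of the real FileType:

from enum import Enum

class DemoFileType(Enum):
    IMAGE = "image"
    VIDEO = "video"
    AUDIO = "audio"
    DOCUMENT = "document"
    CUSTOM = "custom"

def guess_file_type(mime_type: str) -> DemoFileType:
    # Same substring checks as the new helper, against a demo enum.
    if "image" in mime_type:
        return DemoFileType.IMAGE
    if "video" in mime_type:
        return DemoFileType.VIDEO
    if "audio" in mime_type:
        return DemoFileType.AUDIO
    if "text" in mime_type or "pdf" in mime_type:
        return DemoFileType.DOCUMENT
    return DemoFileType.CUSTOM

mapping = {}  # caller did not supply a "type" key
file_type = mapping.get("type", guess_file_type("application/pdf"))
print(file_type)  # DemoFileType.DOCUMENT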
@@ -1,34 +1,65 @@
from collections.abc import Mapping
from collections.abc import Mapping, Sequence
from typing import Any
from uuid import uuid4

from configs import dify_config
from core.file import File
from core.variables import (
from core.variables.exc import VariableError
from core.variables.segments import (
ArrayAnySegment,
ArrayFileSegment,
ArrayNumberSegment,
ArrayNumberVariable,
ArrayObjectSegment,
ArrayObjectVariable,
ArraySegment,
ArrayStringSegment,
ArrayStringVariable,
FileSegment,
FloatSegment,
FloatVariable,
IntegerSegment,
IntegerVariable,
NoneSegment,
ObjectSegment,
Segment,
StringSegment,
)
from core.variables.types import SegmentType
from core.variables.variables import (
ArrayAnyVariable,
ArrayFileVariable,
ArrayNumberVariable,
ArrayObjectVariable,
ArrayStringVariable,
FileVariable,
FloatVariable,
IntegerVariable,
NoneVariable,
ObjectVariable,
SecretVariable,
Segment,
SegmentType,
StringSegment,
StringVariable,
Variable,
)
from core.variables.exc import VariableError


class InvalidSelectorError(ValueError):
pass


class UnsupportedSegmentTypeError(Exception):
pass


# Define the constant
SEGMENT_TO_VARIABLE_MAP = {
StringSegment: StringVariable,
IntegerSegment: IntegerVariable,
FloatSegment: FloatVariable,
ObjectSegment: ObjectVariable,
FileSegment: FileVariable,
ArrayStringSegment: ArrayStringVariable,
ArrayNumberSegment: ArrayNumberVariable,
ArrayObjectSegment: ArrayObjectVariable,
ArrayFileSegment: ArrayFileVariable,
ArrayAnySegment: ArrayAnyVariable,
NoneSegment: NoneVariable,
}


def build_variable_from_mapping(mapping: Mapping[str, Any], /) -> Variable:

@@ -96,3 +127,30 @@ def build_segment(value: Any, /) -> Segment:
case _:
raise ValueError(f"not supported value {value}")
raise ValueError(f"not supported value {value}")


def segment_to_variable(
*,
segment: Segment,
selector: Sequence[str],
id: str | None = None,
name: str | None = None,
description: str = "",
) -> Variable:
if isinstance(segment, Variable):
return segment
name = name or selector[-1]
id = id or str(uuid4())

segment_type = type(segment)
if segment_type not in SEGMENT_TO_VARIABLE_MAP:
raise UnsupportedSegmentTypeError(f"not supported segment type {segment_type}")

variable_class = SEGMENT_TO_VARIABLE_MAP[segment_type]
return variable_class(
id=id,
name=name,
description=description,
value=segment.value,
selector=selector,
)
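The new segment_to_variable helper promotes a bare Segment into the matching Variable subclass by dispatching on its type through SEGMENT_TO_VARIABLE_MAP. A toy sketch of the same dispatch pattern; the Demo classes stand in for the real core.variables types:

from dataclasses import dataclass
from uuid import uuid4

@dataclass
class StringSegmentDemo:      # stand-in for StringSegment
    value: str

@dataclass
class StringVariableDemo:     # stand-in for StringVariable
    id: str
    name: str
    value: str
    selector: tuple[str, ...]

SEGMENT_TO_VARIABLE_DEMO = {StringSegmentDemo: StringVariableDemo}

def segment_to_variable_demo(segment, selector):
    variable_class = SEGMENT_TO_VARIABLE_DEMO.get(type(segment))
    if variable_class is None:
        raise TypeError(f"not supported segment type {type(segment)}")
    # The variable keeps the segment's value and remembers the selector it came from.
    return variable_class(id=str(uuid4()), name=selector[-1], value=segment.value, selector=tuple(selector))

var = segment_to_variable_demo(StringSegmentDemo("hello"), ["node_1", "text"])
print(var.name, var.value)  # text hello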
api/poetry.lock (generated, 115 lines changed)
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.

[[package]]
name = "aiohappyeyeballs"
@ -950,6 +950,10 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
|
||||
|
@ -962,8 +966,14 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
|
||||
|
@ -974,8 +984,24 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
|
||||
|
@ -985,6 +1011,10 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
|
||||
|
@ -996,6 +1026,10 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
|
||||
|
@ -1008,6 +1042,10 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
|
||||
|
@ -1020,6 +1058,10 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
|
||||
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
|
||||
|
@ -2432,6 +2474,26 @@ files = [
|
|||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "fal-client"
|
||||
version = "0.5.6"
|
||||
description = "Python client for fal.ai"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "fal_client-0.5.6-py3-none-any.whl", hash = "sha256:631fd857a3c44753ee46a2eea1e7276471453aca58faac9c3702f744c7c84050"},
|
||||
{file = "fal_client-0.5.6.tar.gz", hash = "sha256:d3afc4b6250023d0ee8437ec504558231d3b106d7aabc12cda8c39883faddecb"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
httpx = ">=0.21.0,<1"
|
||||
httpx-sse = ">=0.4.0,<0.5"
|
||||
|
||||
[package.extras]
|
||||
dev = ["fal-client[docs,test]"]
|
||||
docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme"]
|
||||
test = ["pillow", "pytest", "pytest-asyncio"]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.4"
|
||||
|
@ -4070,6 +4132,17 @@ http2 = ["h2 (>=3,<5)"]
|
|||
socks = ["socksio (==1.*)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "httpx-sse"
|
||||
version = "0.4.0"
|
||||
description = "Consume Server-Sent Event (SSE) messages with HTTPX."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"},
|
||||
{file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.16.4"
|
||||
|
@ -8463,29 +8536,29 @@ pyasn1 = ">=0.1.3"
|
|||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.6.9"
|
||||
version = "0.7.3"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"},
|
||||
{file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"},
|
||||
{file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"},
|
||||
{file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"},
|
||||
{file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"},
|
||||
{file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"},
|
||||
{file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"},
|
||||
{file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"},
|
||||
{file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"},
|
||||
{file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"},
|
||||
{file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"},
|
||||
{file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"},
|
||||
{file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"},
|
||||
{file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"},
|
||||
{file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"},
|
||||
{file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"},
|
||||
{file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"},
|
||||
{file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"},
|
||||
{file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"},
|
||||
{file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"},
|
||||
{file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"},
|
||||
{file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"},
|
||||
{file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"},
|
||||
{file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@@ -10998,4 +11071,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "f20bd678044926913dbbc24bd0cf22503a75817aa55f59457ff7822032139b77"
content-hash = "2ba4b464eebc26598f290fa94713acc44c588f902176e6efa80622911d40f0ac"
@@ -122,6 +122,7 @@ celery = "~5.4.0"
chardet = "~5.1.0"
cohere = "~5.2.4"
dashscope = { version = "~1.17.0", extras = ["tokenizer"] }
fal-client = "0.5.6"
flask = "~3.0.1"
flask-compress = "~1.14"
flask-cors = "~4.0.0"

@@ -278,4 +279,4 @@ pytest-mock = "~3.14.0"
optional = true
[tool.poetry.group.lint.dependencies]
dotenv-linter = "~0.5.0"
ruff = "~0.6.9"
ruff = "~0.7.3"
@@ -1458,6 +1458,7 @@ class SegmentService:
pre_segment_data_list = []
segment_data_list = []
keywords_list = []
position = max_position + 1 if max_position else 1
for segment_item in segments:
content = segment_item["content"]
doc_id = str(uuid.uuid4())

@@ -1475,7 +1476,7 @@ class SegmentService:
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
position=position,
content=content,
word_count=len(content),
tokens=tokens,

@@ -1490,6 +1491,7 @@ class SegmentService:
increment_word_count += segment_document.word_count
db.session.add(segment_document)
segment_data_list.append(segment_document)
position += 1

pre_segment_data_list.append(segment_document)
if "keywords" in segment_item:
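The segment-creation fix replaces the per-segment max_position + 1 expression with a running counter, so a batch of new segments gets consecutive positions instead of all landing on the same one. The pattern in isolation, with toy values:

max_position = 7                  # highest existing position, or None for an empty document
segments = ["first", "second", "third"]

position = max_position + 1 if max_position else 1
created = []
for content in segments:
    created.append({"content": content, "position": position})
    position += 1                 # each new segment takes the next slot

print([item["position"] for item in created])  # [8, 9, 10]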
@@ -109,8 +109,10 @@ class ApiToolManageService:
if schema_type not in [member.value for member in ApiProviderSchemaType]:
raise ValueError(f"invalid schema type {schema}")

provider_name = provider_name.strip()

# check if the provider exists
provider: ApiToolProvider | None = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,

@@ -249,8 +251,10 @@ class ApiToolManageService:
if schema_type not in [member.value for member in ApiProviderSchemaType]:
raise ValueError(f"invalid schema type {schema}")

provider_name = provider_name.strip()

# check if the provider exists
provider: ApiToolProvider | None = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,

@@ -322,7 +326,7 @@ class ApiToolManageService:
"""
delete tool provider
"""
provider: ApiToolProvider | None = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,

@@ -372,7 +376,7 @@ class ApiToolManageService:
if tool_bundle is None:
raise ValueError(f"invalid tool name {tool_name}")

db_provider: ApiToolProvider | None = (
db_provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,
@@ -5,6 +5,7 @@ import click
from celery import shared_task

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.tools.utils.rag_web_reader import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import (

@@ -67,6 +68,16 @@ def clean_dataset_task(
db.session.delete(document)

for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
try:
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: {}".format(upload_file_id)
)
db.session.delete(segment)

db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()
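Both cleanup tasks now delete the images referenced inside segment content before deleting the segments themselves. A simplified, self-contained sketch of that loop; the regex, the in-memory records and the None guard are illustrative assumptions, not the real get_image_upload_file_ids helper or ORM objects:

import logging
import re

def get_segment_image_ids(content: str) -> list[str]:
    # Stand-in for get_image_upload_file_ids: pull upload-file ids out of image links.
    return re.findall(r"/files/([0-9a-f\-]+)/image-preview", content)

upload_files = {"123e4567-e89b-12d3-a456-426614174000": "images/a.png"}  # id -> storage key
fake_storage = set(upload_files.values())                                # pretend object storage

segments = ["see ![chart](/files/123e4567-e89b-12d3-a456-426614174000/image-preview)"]
for segment_content in segments:
    for upload_file_id in get_segment_image_ids(segment_content):
        key = upload_files.get(upload_file_id)
        if key is None:  # defensive guard added here; the diff looks the record up via the DB
            continue
        try:
            fake_storage.discard(key)
        except Exception:
            logging.exception("Delete image_files failed for %s", upload_file_id)

print(fake_storage)  # set() -- the referenced image was removed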
@@ -6,6 +6,7 @@ import click
from celery import shared_task

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.tools.utils.rag_web_reader import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, DocumentSegment

@@ -40,6 +41,16 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
index_processor.clean(dataset, index_node_ids)

for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
try:
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: {}".format(upload_file_id)
)
db.session.delete(segment)

db.session.commit()
@@ -25,7 +25,9 @@ def document_indexing_task(dataset_id: str, document_ids: list):
start_at = time.perf_counter()

dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()

if not dataset:
logging.info(click.style("Dataset is not found: {}".format(dataset_id), fg="yellow"))
return
# check document limit
features = FeatureService.get_features(dataset.tenant_id)
try:
@@ -1,4 +1,5 @@
import os
from collections import UserDict
from unittest.mock import MagicMock

import pytest

@@ -11,7 +12,7 @@ from pymochow.model.table import Table
from requests.adapters import HTTPAdapter


class AttrDict(dict):
class AttrDict(UserDict):
def __getattr__(self, item):
return self.get(item)
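Several vector-store and OSS test mocks switch AttrDict from subclassing dict to collections.UserDict, which stores its items in a .data mapping while keeping the attribute-style lookups the tests rely on. A minimal sketch:

from collections import UserDict

class AttrDict(UserDict):
    # Fall back to mapping lookup for unknown attributes; missing keys come back as None.
    def __getattr__(self, item):
        return self.get(item)

d = AttrDict({"dimension": 1024})
print(d.dimension)     # 1024, via __getattr__
print(d["dimension"])  # 1024, normal mapping access
print(d.missing)       # None instead of raising AttributeError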
@@ -1,4 +1,5 @@
import os
from collections import UserDict
from typing import Optional

import pytest

@@ -50,7 +51,7 @@ class MockIndex:
return AttrDict({"dimension": 1024})


class AttrDict(dict):
class AttrDict(UserDict):
def __getattr__(self, item):
return self.get(item)
@@ -1,5 +1,5 @@
from core.helper import encrypter
from core.variables import SecretVariable, StringSegment
from core.variables import SecretVariable, StringVariable
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey

@@ -54,4 +54,5 @@ def test_convert_variable_to_segment_group():
segments_group = variable_pool.convert_template(template)
assert segments_group.text == "fake-user-id"
assert segments_group.log == "fake-user-id"
assert segments_group.value == [StringSegment(value="fake-user-id")]
assert isinstance(segments_group.value[0], StringVariable)
assert segments_group.value[0].value == "fake-user-id"
@@ -140,6 +140,17 @@ def test_extract_text_from_plain_text():
assert text == "Hello, world!"


def test_extract_text_from_plain_text_non_utf8():
import tempfile

non_utf8_content = b"Hello, world\xa9."  # \xA9 represents © in Latin-1
with tempfile.NamedTemporaryFile(delete=True) as temp_file:
temp_file.write(non_utf8_content)
temp_file.seek(0)
text = _extract_text_from_plain_text(temp_file.read())
assert text == "Hello, world."


@patch("pypdfium2.PdfDocument")
def test_extract_text_from_pdf(mock_pdf_document):
mock_page = Mock()
@@ -1,4 +1,5 @@
import os
from collections import UserDict
from unittest.mock import MagicMock

import pytest

@@ -14,7 +15,7 @@ from tests.unit_tests.oss.__mock.base import (
)


class AttrDict(dict):
class AttrDict(UserDict):
def __getattr__(self, item):
return self.get(item)
@@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Startup mode, 'api' starts the API server.

@@ -227,7 +227,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
CONSOLE_WEB_URL: ''

@@ -397,7 +397,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:0.11.0
image: langgenius/dify-web:0.11.1
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
@@ -689,6 +689,9 @@ TEMPLATE_TRANSFORM_MAX_LENGTH=80000
CODE_MAX_STRING_ARRAY_LENGTH=30
CODE_MAX_OBJECT_ARRAY_LENGTH=30
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
CODE_EXECUTION_CONNECT_TIMEOUT=10
CODE_EXECUTION_READ_TIMEOUT=60
CODE_EXECUTION_WRITE_TIMEOUT=10

# Workflow runtime configuration
WORKFLOW_MAX_EXECUTION_STEPS=500
@@ -244,6 +244,9 @@ x-shared-env: &shared-api-worker-env
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
CODE_EXECUTION_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10}
CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60}
CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10}
CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5}

@@ -280,7 +283,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Use the shared environment variables.

@@ -300,7 +303,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Use the shared environment variables.

@@ -319,11 +322,13 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:0.11.0
image: langgenius/dify-web:0.11.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
APP_API_URL: ${APP_API_URL:-}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-}
MARKETPLACE_URL: ${MARKETPLACE_URL:-}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
@@ -1,5 +1,5 @@
# base image
FROM node:20.11-alpine3.19 AS base
FROM node:20-alpine3.20 AS base
LABEL maintainer="takatost@gmail.com"

# if you located in China, you can use aliyun mirror to speed up

@@ -41,6 +41,8 @@ ENV EDITION=SELF_HOSTED
ENV DEPLOY_ENV=PRODUCTION
ENV CONSOLE_API_URL=http://127.0.0.1:5001
ENV APP_API_URL=http://127.0.0.1:5001
ENV MARKETPLACE_API_URL=http://127.0.0.1:5001
ENV MARKETPLACE_URL=http://127.0.0.1:5001
ENV PORT=3000
ENV NEXT_TELEMETRY_DISABLED=1
@ -1,19 +1,61 @@
|
|||
import { handleDelete } from './actions'
|
||||
'use client'
|
||||
import Card from '@/app/components/plugins/card'
|
||||
import { customTool, extensionDallE, modelGPT4, toolNotion } from '@/app/components/plugins/card/card-mock'
|
||||
import PluginItem from '@/app/components/plugins/plugin-item'
|
||||
// import PluginItem from '@/app/components/plugins/plugin-item'
|
||||
import CardMoreInfo from '@/app/components/plugins/card/card-more-info'
|
||||
import ProviderCard from '@/app/components/plugins/provider-card'
|
||||
// import ProviderCard from '@/app/components/plugins/provider-card'
|
||||
import Badge from '@/app/components/base/badge'
|
||||
import InstallBundle from '@/app/components/plugins/install-plugin/install-bundle'
|
||||
import { useBoolean } from 'ahooks'
|
||||
|
||||
const PluginList = async () => {
|
||||
const PluginList = () => {
|
||||
const pluginList = [toolNotion, extensionDallE, modelGPT4, customTool]
|
||||
const [isShow, {
|
||||
setFalse: hide,
|
||||
}] = useBoolean(true)
|
||||
|
||||
return (
|
||||
<div className='pb-3 bg-white'>
|
||||
{isShow && (
|
||||
<InstallBundle
|
||||
onClose={hide}
|
||||
fromDSLPayload={[
|
||||
// {
|
||||
// type: 'marketplace',
|
||||
// value: {
|
||||
// plugin_unique_identifier: 'langgenius/google:0.0.2@dcb354c9d0fee60e6e9c9eb996e1e485bbef343ba8cd545c0cfb3ec80970f6f1',
|
||||
// },
|
||||
// },
|
||||
{
|
||||
type: 'github',
|
||||
value: {
|
||||
repo: 'YIXIAO0/test',
|
||||
version: '1.11.5',
|
||||
package: 'test.difypkg',
|
||||
github_plugin_unique_identifier: 'yixiao0/test:0.0.1@3592166c87afcf944b4f13f27467a5c8f9e00bd349cb42033a072734a37431b4',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'github',
|
||||
value: {
|
||||
package: 'dify-test.difypkg',
|
||||
repo: 'WTW0313/dify-test',
|
||||
version: '0.0.5-beta.2',
|
||||
github_plugin_unique_identifier: 'wtw0313/dify-test:0.0.1@1633daa043b47155d4228e2db7734245fd6d3e20ba812e5c02ce69fc1e3038f4',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'marketplace',
|
||||
value: {
|
||||
plugin_unique_identifier: 'langgenius/openai:0.0.1@f88fdb98d104466db16a425bfe3af8c1bcad45047a40fb802d98a989ac57a5a3',
|
||||
},
|
||||
},
|
||||
]} />
|
||||
)
|
||||
}
|
||||
<div className='mx-3 '>
|
||||
<h2 className='my-3'>Dify Plugin list</h2>
|
||||
<div className='grid grid-cols-2 gap-3'>
|
||||
{/* <h2 className='my-3'>Dify Plugin list</h2> */}
|
||||
{/* <div className='grid grid-cols-2 gap-3'>
|
||||
{pluginList.map((plugin, index) => (
|
||||
<PluginItem
|
||||
key={index}
|
||||
|
@ -21,7 +63,7 @@ const PluginList = async () => {
|
|||
onDelete={handleDelete}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div> */}
|
||||
|
||||
<h2 className='my-3'>Install Plugin / Package under bundle</h2>
|
||||
<div className='w-[512px] rounded-2xl bg-background-section-burn p-2'>
|
||||
|
@ -33,21 +75,21 @@ const PluginList = async () => {
|
|||
}
|
||||
/>
|
||||
</div>
|
||||
<h3 className='my-1'>Installed</h3>
|
||||
{/* <h3 className='my-1'>Installed</h3>
|
||||
<div className='w-[512px] rounded-2xl bg-background-section-burn p-2'>
|
||||
<Card
|
||||
payload={toolNotion as any}
|
||||
descriptionLineRows={1}
|
||||
installed
|
||||
/>
|
||||
</div>
|
||||
</div> */}
|
||||
|
||||
<h3 className='my-1'>Install model provide</h3>
|
||||
{/* <h3 className='my-1'>Install model provide</h3>
|
||||
<div className='grid grid-cols-2 gap-3'>
|
||||
{pluginList.map((plugin, index) => (
|
||||
<ProviderCard key={index} payload={plugin as any} />
|
||||
))}
|
||||
</div>
|
||||
</div> */}
|
||||
|
||||
<div className='my-3 h-[px] bg-gray-50'></div>
|
||||
<h2 className='my-3'>Marketplace Plugin list</h2>
|
||||
|
@ -67,8 +109,8 @@ const PluginList = async () => {
|
|||
)
|
||||
}
|
||||
|
||||
export const metadata = {
|
||||
title: 'Plugins - Card',
|
||||
}
|
||||
// export const metadata = {
|
||||
// title: 'Plugins - Card',
|
||||
// }
|
||||
|
||||
export default PluginList
|
||||
|
|
|
@@ -23,7 +23,7 @@ const FeaturePanel: FC<IFeaturePanelProps> = ({
children,
}) => {
return (
<div className={cn('rounded-xl border-t-[0.5px] border-l-[0.5px] bg-background-section-burn pb-3', noBodySpacing && '!pb-0', className)}>
<div className={cn('rounded-xl border-t-[0.5px] border-l-[0.5px] bg-background-section-burn pb-3', noBodySpacing && 'pb-0', className)}>
{/* Header */}
<div className={cn('px-3 pt-2', hasHeaderBottomBorder && 'border-b border-divider-subtle')}>
<div className='flex justify-between items-center h-8'>
@@ -92,6 +92,7 @@ const AgentTools: FC = () => {
tool_name: tool.tool_name,
tool_label: tool.tool_label,
tool_parameters: tool.params,
notAuthor: !tool.is_team_authorization,
enabled: true,
})
})

@@ -101,7 +102,7 @@ const AgentTools: FC = () => {
return (
<>
<Panel
className="mt-2"
className={cn('mt-2', tools.length === 0 && 'pb-2')}
noBodySpacing={tools.length === 0}
headerIcon={
<RiHammerFill className='w-4 h-4 text-primary-500' />
@ -47,12 +47,16 @@ const DatasetConfig: FC = () => {
|
|||
|
||||
const {
|
||||
currentModel: currentRerankModel,
|
||||
currentProvider: currentRerankProvider,
|
||||
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
|
||||
|
||||
const onRemove = (id: string) => {
|
||||
const filteredDataSets = dataSet.filter(item => item.id !== id)
|
||||
setDataSet(filteredDataSets)
|
||||
const retrievalConfig = getMultipleRetrievalConfig(datasetConfigs as any, filteredDataSets, dataSet, !!currentRerankModel)
|
||||
const retrievalConfig = getMultipleRetrievalConfig(datasetConfigs as any, filteredDataSets, dataSet, {
|
||||
provider: currentRerankProvider?.provider,
|
||||
model: currentRerankModel?.model,
|
||||
})
|
||||
setDatasetConfigs({
|
||||
...(datasetConfigs as any),
|
||||
...retrievalConfig,
|
||||
|
|
|
@ -25,7 +25,7 @@ import { useSelectedDatasetsMode } from '@/app/components/workflow/nodes/knowled
|
|||
import Switch from '@/app/components/base/switch'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
|
||||
interface Props {
|
||||
type Props = {
|
||||
datasetConfigs: DatasetConfigs
|
||||
onChange: (configs: DatasetConfigs, isRetrievalModeChange?: boolean) => void
|
||||
isInWorkflow?: boolean
|
||||
|
@ -172,7 +172,7 @@ const ConfigContent: FC<Props> = ({
|
|||
return false
|
||||
|
||||
return datasetConfigs.reranking_enable
|
||||
}, [canManuallyToggleRerank, datasetConfigs.reranking_enable])
|
||||
}, [canManuallyToggleRerank, datasetConfigs.reranking_enable, isRerankDefaultModelValid])
|
||||
|
||||
const handleDisabledSwitchClick = useCallback(() => {
|
||||
if (!currentRerankModel && !showRerankModel)
|
||||
|
|
|
@ -43,6 +43,7 @@ const ParamsConfig = ({
|
|||
const {
|
||||
defaultModel: rerankDefaultModel,
|
||||
currentModel: isRerankDefaultModelValid,
|
||||
currentProvider: rerankDefaultProvider,
|
||||
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
|
||||
|
||||
const isValid = () => {
|
||||
|
@ -91,7 +92,10 @@ const ParamsConfig = ({
|
|||
reranking_mode: restConfigs.reranking_mode,
|
||||
weights: restConfigs.weights,
|
||||
reranking_enable: restConfigs.reranking_enable,
|
||||
}, selectedDatasets, selectedDatasets, !!isRerankDefaultModelValid)
|
||||
}, selectedDatasets, selectedDatasets, {
|
||||
provider: rerankDefaultProvider?.provider,
|
||||
model: isRerankDefaultModelValid?.model,
|
||||
})
|
||||
|
||||
setTempDataSetConfigs({
|
||||
...retrievalConfig,
|
||||
|
@ -135,11 +139,11 @@ const ParamsConfig = ({
|
|||
/>
|
||||
|
||||
<div className='mt-6 flex justify-end'>
|
||||
<Button className='mr-2 flex-shrink-0' onClick={() => {
|
||||
<Button className='mr-2 shrink-0' onClick={() => {
|
||||
setTempDataSetConfigs(datasetConfigs)
|
||||
setRerankSettingModalOpen(false)
|
||||
}}>{t('common.operation.cancel')}</Button>
|
||||
<Button variant='primary' className='flex-shrink-0' onClick={handleSave} >{t('common.operation.save')}</Button>
|
||||
<Button variant='primary' className='shrink-0' onClick={handleSave} >{t('common.operation.save')}</Button>
|
||||
</div>
|
||||
</Modal>
|
||||
)
|
||||
|
|
|
@ -72,6 +72,7 @@ import { SupportUploadFileTypes } from '@/app/components/workflow/types'
|
|||
import NewFeaturePanel from '@/app/components/base/features/new-feature-panel'
|
||||
import { fetchFileUploadConfig } from '@/service/common'
|
||||
import { correctProvider } from '@/utils'
|
||||
import PluginDependency from '@/app/components/workflow/plugin-dependency'
|
||||
|
||||
type PublishConfig = {
|
||||
modelConfig: ModelConfig
|
||||
|
@ -227,6 +228,7 @@ const Configuration: FC = () => {
|
|||
const [rerankSettingModalOpen, setRerankSettingModalOpen] = useState(false)
|
||||
const {
|
||||
currentModel: currentRerankModel,
|
||||
currentProvider: currentRerankProvider,
|
||||
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
|
||||
const handleSelect = (data: DataSet[]) => {
|
||||
if (isEqual(data.map(item => item.id), dataSets.map(item => item.id))) {
|
||||
|
@ -280,7 +282,10 @@ const Configuration: FC = () => {
|
|||
reranking_mode: restConfigs.reranking_mode,
|
||||
weights: restConfigs.weights,
|
||||
reranking_enable: restConfigs.reranking_enable,
|
||||
}, newDatasets, dataSets, !!currentRerankModel)
|
||||
}, newDatasets, dataSets, {
|
||||
provider: currentRerankProvider?.provider,
|
||||
model: currentRerankModel?.model,
|
||||
})
|
||||
|
||||
setDatasetConfigs({
|
||||
...retrievalConfig,
|
||||
|
@ -635,7 +640,10 @@ const Configuration: FC = () => {
|
|||
|
||||
syncToPublishedConfig(config)
|
||||
setPublishedConfig(config)
|
||||
const retrievalConfig = getMultipleRetrievalConfig(modelConfig.dataset_configs, datasets, datasets, !!currentRerankModel)
|
||||
const retrievalConfig = getMultipleRetrievalConfig(modelConfig.dataset_configs, datasets, datasets, {
|
||||
provider: currentRerankProvider?.provider,
|
||||
model: currentRerankModel?.model,
|
||||
})
|
||||
setDatasetConfigs({
|
||||
retrieval_model: RETRIEVE_TYPE.multiWay,
|
||||
...modelConfig.dataset_configs,
|
||||
|
@ -1031,6 +1039,7 @@ const Configuration: FC = () => {
|
|||
onAutoAddPromptVariable={handleAddPromptVariable}
|
||||
/>
|
||||
)}
|
||||
<PluginDependency />
|
||||
</>
|
||||
</FeaturesProvider>
|
||||
</ConfigContext.Provider>
|
||||
|
|
|
@ -21,6 +21,7 @@ import AppsFull from '@/app/components/billing/apps-full-in-dialog'
|
|||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import cn from '@/utils/classnames'
|
||||
import { useMutationCheckDependenciesBeforeImportDSL } from '@/service/use-plugins'
|
||||
|
||||
type CreateFromDSLModalProps = {
|
||||
show: boolean
|
||||
|
@ -43,6 +44,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
|
|||
const [fileContent, setFileContent] = useState<string>()
|
||||
const [currentTab, setCurrentTab] = useState(activeTab)
|
||||
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
|
||||
const { mutateAsync } = useMutationCheckDependenciesBeforeImportDSL()
|
||||
|
||||
const readFile = (file: File) => {
|
||||
const reader = new FileReader()
|
||||
|
@ -81,11 +83,13 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
|
|||
app = await importApp({
|
||||
data: fileContent || '',
|
||||
})
|
||||
await mutateAsync({ dslString: fileContent })
|
||||
}
|
||||
if (currentTab === CreateFromDSLModalTab.FROM_URL) {
|
||||
app = await importAppFromUrl({
|
||||
url: dslUrlValue || '',
|
||||
})
|
||||
await mutateAsync({ url: dslUrlValue })
|
||||
}
|
||||
if (onSuccess)
|
||||
onSuccess()
|
||||
@@ -243,7 +243,7 @@ const SettingsModal: FC<ISettingsModalProps> = ({
<div className='w-full mt-8'>
<p className='system-xs-medium text-gray-500'>{t(`${prefixSettings}.workflow.title`)}</p>
<div className='flex justify-between items-center'>
<div className='font-medium system-sm-semibold flex-grow text-gray-900'>{t(`${prefixSettings}.workflow.subTitle`)}</div>
<div className='font-medium system-sm-semibold grow text-gray-900'>{t(`${prefixSettings}.workflow.subTitle`)}</div>
<Switch
disabled={!(appInfo.mode === 'workflow' || appInfo.mode === 'advanced-chat')}
defaultValue={inputInfo.show_workflow_steps}

@@ -261,11 +261,15 @@ const SettingsModal: FC<ISettingsModalProps> = ({
onChange={onChange('chatColorTheme')}
placeholder='E.g #A020F0'
/>
<div className="mt-1 flex justify-between items-center">
<p className={`ml-2 ${s.settingsTip} text-gray-500`}>{t(`${prefixSettings}.chatColorThemeInverted`)}</p>
<Switch defaultValue={inputInfo.chatColorThemeInverted} onChange={v => setInputInfo({ ...inputInfo, chatColorThemeInverted: v })}></Switch>
</div>
</>}
{systemFeatures.enable_web_sso_switch_component && <div className='w-full mt-8'>
<p className='system-xs-medium text-gray-500'>{t(`${prefixSettings}.sso.label`)}</p>
<div className='flex justify-between items-center'>
<div className='font-medium system-sm-semibold flex-grow text-gray-900'>{t(`${prefixSettings}.sso.title`)}</div>
<div className='font-medium system-sm-semibold grow text-gray-900'>{t(`${prefixSettings}.sso.title`)}</div>
<Tooltip
disabled={systemFeatures.sso_enforced_for_web}
popupContent={

@@ -280,8 +284,8 @@ const SettingsModal: FC<ISettingsModalProps> = ({
</div>}
{!isShowMore && <div className='w-full cursor-pointer mt-8' onClick={() => setIsShowMore(true)}>
<div className='flex justify-between'>
<div className={`font-medium ${s.settingTitle} flex-grow text-gray-900`}>{t(`${prefixSettings}.more.entry`)}</div>
<div className='flex-shrink-0 w-4 h-4 text-gray-500'>
<div className={`font-medium ${s.settingTitle} grow text-gray-900`}>{t(`${prefixSettings}.more.entry`)}</div>
<div className='shrink-0 w-4 h-4 text-gray-500'>
<ChevronRightIcon />
</div>
</div>
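
The className edits in this file all make the same substitution: the legacy Tailwind utilities `flex-grow` and `flex-shrink-0` are replaced by their current short names `grow` and `shrink-0`. In Tailwind CSS v3 both spellings compile to the same declarations (`flex-grow: 1` and `flex-shrink: 0`), with the long forms kept only as deprecated aliases, so this is a naming cleanup with no visual change. A minimal illustration (the component and text are made up):

```tsx
// Illustrative only: legacy vs. current Tailwind utility names, same computed CSS.
const SettingRow = ({ title }: { title: string }) => (
  <div className="flex items-center justify-between">
    {/* was: className="flex-grow" -> flex-grow: 1 */}
    <div className="grow font-medium">{title}</div>
    {/* was: className="flex-shrink-0 w-4 h-4" -> flex-shrink: 0 */}
    <div className="shrink-0 h-4 w-4" />
  </div>
)

export default SettingRow
```
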
@@ -2,15 +2,15 @@
display: flex;
flex-direction: row;
align-items: center;
background-color: #ffffff;
background-color: var(--color-components-chat-input-audio-bg-alt);
border-radius: 10px;
padding: 8px;
min-width: 240px;
max-width: 420px;
max-height: 40px;
backdrop-filter: blur(5px);
border: 1px solid rgba(16, 24, 40, 0.08);
box-shadow: 0 1px 2px rgba(9, 9, 11, 0.05);
border: 1px solid var(--color-components-panel-border-subtle);
box-shadow: 0 1px 2px var(--color-shadow-shadow-3);
gap: 8px;
}

@@ -19,8 +19,8 @@
width: 16px;
height: 16px;
border-radius: 50%;
background-color: #296DFF;
color: white;
background-color: var(--color-components-button-primary-bg);
color: var(--color-components-chat-input-audio-bg-alt);
border: none;
cursor: pointer;
align-items: center;

@@ -30,16 +30,15 @@
}

.playButton:hover {
background-color: #3367d6;
background-color: var(--color-components-button-primary-bg-hover);
}

.playButton:disabled {
background-color: #bdbdbf;
background-color: var(--color-components-button-primary-bg-disabled);
}

.audioControls {
flex-grow: 1;

}

.progressBarContainer {

@@ -76,8 +75,8 @@

.timeDisplay {
/* position: absolute; */
color: #296DFF;
border-radius: 2px;
color: var(--color-text-accent-secondary);
font-size: 12px;
order: 0;
height: 100%;
width: 50px;

@@ -97,7 +96,6 @@
} */

.duration {
background-color: rgba(255, 255, 255, 0.8);
padding: 2px 4px;
border-radius: 10px;
}

@@ -114,6 +112,6 @@
}

.playButton svg path,
.playButton svg rect{
fill:currentColor;
.playButton svg rect {
fill: currentColor;
}
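
Every change in this stylesheet swaps a hardcoded color (`#ffffff`, `#296DFF`, `rgba(...)`) for a CSS custom property from the design-token layer, so the audio player follows the active theme instead of always rendering light-mode colors. The token names come straight from the diff; the definitions below are invented placeholders, shown only to illustrate the pattern:

```css
/* Illustrative only: tokens are defined once per theme (values here are made up)… */
:root {
  --color-components-button-primary-bg: #296dff;
  --color-components-button-primary-bg-hover: #3367d6;
  --color-components-button-primary-bg-disabled: #bdbdbf;
}

/* …and components reference the token, so switching themes only swaps the definitions. */
.playButton {
  background-color: var(--color-components-button-primary-bg);
}
.playButton:hover {
  background-color: var(--color-components-button-primary-bg-hover);
}
.playButton:disabled {
  background-color: var(--color-components-button-primary-bg-disabled);
}
```
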
@@ -173,7 +173,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
const conversationInputs: Record<string, any> = {}

inputsForms.forEach((item: any) => {
conversationInputs[item.variable] = item.default || ''
conversationInputs[item.variable] = item.default || null
})
handleNewConversationInputsChange(conversationInputs)
}, [handleNewConversationInputsChange, inputsForms])

@@ -159,7 +159,7 @@ export const useEmbeddedChatbot = () => {
const conversationInputs: Record<string, any> = {}

inputsForms.forEach((item: any) => {
conversationInputs[item.variable] = item.default || ''
conversationInputs[item.variable] = item.default || null
})
handleNewConversationInputsChange(conversationInputs)
}, [handleNewConversationInputsChange, inputsForms])
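
Both chat hooks change the fallback for an unset conversation input from `''` to `null`. A plausible reading (the PR description is not shown here, so treat this as an assumption): an empty string is indistinguishable from a value the user deliberately left blank, whereas `null` keeps "no value was provided" detectable by required-field checks further downstream. A small self-contained illustration:

```ts
// Illustrative only: how the fallback choice changes what downstream code can see.
type InputForm = { variable: string; default?: string }

const buildConversationInputs = (forms: InputForm[]) => {
  const inputs: Record<string, string | null> = {}
  forms.forEach((item) => {
    // `|| ''` would turn a missing default into an apparently-filled empty answer;
    // `|| null` keeps the "nothing was provided" state visible.
    inputs[item.variable] = item.default || null
  })
  return inputs
}

// buildConversationInputs([{ variable: 'topic' }])
//   -> { topic: null }
// buildConversationInputs([{ variable: 'topic', default: 'AI' }])
//   -> { topic: 'AI' }
```
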
@@ -32,6 +32,7 @@ const CopyBtn = ({
<div className={`${className}`}>
<Tooltip
popupContent={(isCopied ? t('appApi.copied') : t('appApi.copy'))}
asChild={false}
>
<div
onMouseLeave={onMouseLeave}

@@ -59,7 +59,7 @@ const FileFromLinkOrLocal = ({
<PortalToFollowElemTrigger onClick={() => setOpen(v => !v)} asChild>
{trigger(open)}
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className='z-10'>
<PortalToFollowElemContent className='z-[1001]'>
<div className='p-3 w-[280px] bg-components-panel-bg-blur border-[0.5px] border-components-panel-border rounded-xl shadow-lg'>
{
showFromLink && (
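
The popover content rendered through `PortalToFollowElemContent` moves from `z-10` to the arbitrary value `z-[1001]`, presumably so it stacks above modal overlays that sit around the 1000 range; Tailwind arbitrary values simply emit the literal declaration. A tiny sketch (component name and value are illustrative only):

```tsx
// Illustrative only: `z-[1001]` compiles to `z-index: 1001`, while `z-10` is `z-index: 10`.
import type { ReactNode } from 'react'

const PopoverLayer = ({ children }: { children: ReactNode }) => (
  <div className="fixed z-[1001]">{children}</div>
)

export default PopoverLayer
```
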
@@ -0,0 +1,9 @@
<svg xmlns="http://www.w3.org/2000/svg" width="46" height="24" viewBox="0 0 46 24" fill="none">
<path opacity="0.5" d="M-6.5 8C-6.5 3.58172 -2.91828 0 1.5 0H45.5L33.0248 24H1.49999C-2.91829 24 -6.5 20.4183 -6.5 16V8Z" fill="url(#paint0_linear_6333_42118)"/>
<defs>
<linearGradient id="paint0_linear_6333_42118" x1="1.81679" y1="5.47784e-07" x2="101.257" y2="30.3866" gradientUnits="userSpaceOnUse">
<stop stop-color="white" stop-opacity="0.12"/>
<stop offset="1" stop-color="white" stop-opacity="0.3"/>
</linearGradient>
</defs>
</svg>
(new SVG image, 561 B)

@@ -0,0 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="sparkles-soft">
<path id="Vector" opacity="0.5" d="M10.9963 1.36798C10.9839 1.25339 10.8909 1.16677 10.7802 1.16666C10.6695 1.16654 10.5763 1.25295 10.5636 1.36752C10.5045 1.90085 10.3525 2.26673 10.1143 2.5149C9.87599 2.76307 9.52476 2.92145 9.01275 2.98296C8.90277 2.99618 8.81983 3.09324 8.81995 3.20856C8.82006 3.32388 8.90322 3.42076 9.0132 3.43373C9.51653 3.49312 9.87583 3.65148 10.1201 3.90135C10.3631 4.14986 10.518 4.51523 10.563 5.04321C10.573 5.16035 10.6673 5.25012 10.7802 5.24999C10.8931 5.24986 10.9872 5.15987 10.9969 5.0427C11.0401 4.52364 11.1949 4.15004 11.4394 3.89528C11.684 3.64052 12.0426 3.47926 12.5409 3.43433C12.6534 3.42419 12.7398 3.32619 12.7399 3.20858C12.7401 3.09097 12.6539 2.99277 12.5414 2.98236C12.0346 2.93546 11.6838 2.77407 11.4452 2.52098C11.2054 2.2665 11.0533 1.89229 10.9963 1.36798Z" fill="#F5F8FF"/>
<path id="Vector_2" d="M7.13646 2.85102C7.10442 2.55638 6.8653 2.33365 6.5806 2.33334C6.29595 2.33304 6.05633 2.55526 6.02374 2.84984C5.87186 4.22127 5.48089 5.1621 4.86827 5.80025C4.25565 6.43838 3.35245 6.84566 2.03587 7.00386C1.75307 7.03781 1.53975 7.28742 1.54004 7.58393C1.54033 7.88049 1.75415 8.12958 2.03701 8.16294C3.33132 8.31566 4.25509 8.72289 4.88328 9.36543C5.50807 10.0045 5.90647 10.9439 6.02222 12.3016C6.04793 12.6029 6.29035 12.8337 6.58066 12.8333C6.87102 12.833 7.11294 12.6016 7.13797 12.3003C7.24885 10.9656 7.64695 10.0049 8.27583 9.34979C8.90477 8.69471 9.82698 8.28002 11.1083 8.16452C11.3976 8.13844 11.6197 7.88644 11.62 7.58399C11.6204 7.28159 11.3988 7.02906 11.1096 7.00229C9.8062 6.88171 8.90432 6.46673 8.29084 5.81589C7.674 5.16152 7.28306 4.19926 7.13646 2.85102Z" fill="#F5F8FF"/>
</g>
</svg>
(new SVG image, 1.7 KiB)

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './BaichuanTextCn.module.css'
import cn from '@/utils/classnames'
import s from './BaichuanTextCn.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './Minimax.module.css'
import cn from '@/utils/classnames'
import s from './Minimax.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './MinimaxText.module.css'
import cn from '@/utils/classnames'
import s from './MinimaxText.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './Tongyi.module.css'
import cn from '@/utils/classnames'
import s from './Tongyi.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './TongyiText.module.css'
import cn from '@/utils/classnames'
import s from './TongyiText.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './TongyiTextCn.module.css'
import cn from '@/utils/classnames'
import s from './TongyiTextCn.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './Wxyy.module.css'
import cn from '@/utils/classnames'
import s from './Wxyy.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './WxyyText.module.css'
import cn from '@/utils/classnames'
import s from './WxyyText.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -2,8 +2,8 @@
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import s from './WxyyTextCn.module.css'
import cn from '@/utils/classnames'
import s from './WxyyTextCn.module.css'

const Icon = React.forwardRef<HTMLSpanElement, React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement>>((
{ className, ...restProps },

@@ -0,0 +1,67 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"xmlns": "http://www.w3.org/2000/svg",
"width": "46",
"height": "24",
"viewBox": "0 0 46 24",
"fill": "none"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.5",
"d": "M-6.5 8C-6.5 3.58172 -2.91828 0 1.5 0H45.5L33.0248 24H1.49999C-2.91829 24 -6.5 20.4183 -6.5 16V8Z",
"fill": "url(#paint0_linear_6333_42118)"
},
"children": []
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint0_linear_6333_42118",
"x1": "1.81679",
"y1": "5.47784e-07",
"x2": "101.257",
"y2": "30.3866",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "white",
"stop-opacity": "0.12"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "white",
"stop-opacity": "0.3"
},
"children": []
}
]
}
]
}
]
},
"name": "Highlight"
}

@@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Highlight.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'

const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)

Icon.displayName = 'Highlight'

export default Icon
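
`Highlight.tsx` follows the generated-icon template used throughout `web/app/components/base/icons`: a JSON description of the SVG tree plus a thin `forwardRef` wrapper around `IconBase`. The real `IconBase` is not part of this diff; the sketch below is an assumption of how such a data-driven renderer typically works, shown only to make the `data={data as IconData}` contract concrete.

```tsx
// Hypothetical sketch of a JSON-driven icon renderer; the repo's actual IconBase
// may differ -- this only illustrates the data={...} contract used above.
import * as React from 'react'

export type IconNode = {
  type: string
  name: string
  attributes: Record<string, string>
  children?: IconNode[]
}

export type IconData = { name: string; icon: IconNode }

export type IconBaseProps = React.SVGProps<SVGSVGElement> & { data: IconData }

// Recursively turn the JSON tree into React elements (g, path, defs, ...).
const renderNode = (node: IconNode, key?: React.Key): React.ReactElement =>
  React.createElement(
    node.name,
    { ...node.attributes, key },
    node.children?.length ? node.children.map((child, i) => renderNode(child, i)) : null,
  )

const IconBase = ({ data, ...rest }: IconBaseProps) => {
  const { attributes, children = [] } = data.icon
  return (
    <svg {...attributes} {...rest} aria-label={data.name}>
      {children.map((child, i) => renderNode(child, i))}
    </svg>
  )
}

export default IconBase
```
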
web/app/components/base/icons/src/public/common/Lock.json (new file, 38 lines)

@@ -0,0 +1,38 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "lock"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector",
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M8 1.75C6.27411 1.75 4.875 3.14911 4.875 4.875V6.125C3.83947 6.125 3 6.96444 3 8V12.375C3 13.4106 3.83947 14.25 4.875 14.25H11.125C12.1606 14.25 13 13.4106 13 12.375V8C13 6.96444 12.1606 6.125 11.125 6.125V4.875C11.125 3.14911 9.72587 1.75 8 1.75ZM9.875 6.125V4.875C9.875 3.83947 9.03556 3 8 3C6.96444 3 6.125 3.83947 6.125 4.875V6.125H9.875ZM8 8.625C8.34519 8.625 8.625 8.90481 8.625 9.25V11.125C8.625 11.4702 8.34519 11.75 8 11.75C7.65481 11.75 7.375 11.4702 7.375 11.125V9.25C7.375 8.90481 7.65481 8.625 8 8.625Z",
"fill": "#155AEF"
},
"children": []
}
]
}
]
},
"name": "Lock"
}

web/app/components/base/icons/src/public/common/Lock.tsx (new file, 16 lines)

@@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Lock.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'

const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)

Icon.displayName = 'Lock'

export default Icon

@@ -0,0 +1,47 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "14",
"height": "14",
"viewBox": "0 0 14 14",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "sparkles-soft"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector",
"opacity": "0.5",
"d": "M10.9963 1.36798C10.9839 1.25339 10.8909 1.16677 10.7802 1.16666C10.6695 1.16654 10.5763 1.25295 10.5636 1.36752C10.5045 1.90085 10.3525 2.26673 10.1143 2.5149C9.87599 2.76307 9.52476 2.92145 9.01275 2.98296C8.90277 2.99618 8.81983 3.09324 8.81995 3.20856C8.82006 3.32388 8.90322 3.42076 9.0132 3.43373C9.51653 3.49312 9.87583 3.65148 10.1201 3.90135C10.3631 4.14986 10.518 4.51523 10.563 5.04321C10.573 5.16035 10.6673 5.25012 10.7802 5.24999C10.8931 5.24986 10.9872 5.15987 10.9969 5.0427C11.0401 4.52364 11.1949 4.15004 11.4394 3.89528C11.684 3.64052 12.0426 3.47926 12.5409 3.43433C12.6534 3.42419 12.7398 3.32619 12.7399 3.20858C12.7401 3.09097 12.6539 2.99277 12.5414 2.98236C12.0346 2.93546 11.6838 2.77407 11.4452 2.52098C11.2054 2.2665 11.0533 1.89229 10.9963 1.36798Z",
"fill": "#F5F8FF"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector_2",
"d": "M7.13646 2.85102C7.10442 2.55638 6.8653 2.33365 6.5806 2.33334C6.29595 2.33304 6.05633 2.55526 6.02374 2.84984C5.87186 4.22127 5.48089 5.1621 4.86827 5.80025C4.25565 6.43838 3.35245 6.84566 2.03587 7.00386C1.75307 7.03781 1.53975 7.28742 1.54004 7.58393C1.54033 7.88049 1.75415 8.12958 2.03701 8.16294C3.33132 8.31566 4.25509 8.72289 4.88328 9.36543C5.50807 10.0045 5.90647 10.9439 6.02222 12.3016C6.04793 12.6029 6.29035 12.8337 6.58066 12.8333C6.87102 12.833 7.11294 12.6016 7.13797 12.3003C7.24885 10.9656 7.64695 10.0049 8.27583 9.34979C8.90477 8.69471 9.82698 8.28002 11.1083 8.16452C11.3976 8.13844 11.6197 7.88644 11.62 7.58399C11.6204 7.28159 11.3988 7.02906 11.1096 7.00229C9.8062 6.88171 8.90432 6.46673 8.29084 5.81589C7.674 5.16152 7.28306 4.19926 7.13646 2.85102Z",
"fill": "#F5F8FF"
},
"children": []
}
]
}
]
},
"name": "SparklesSoft"
}

@@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './SparklesSoft.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'

const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)

Icon.displayName = 'SparklesSoft'

export default Icon

@@ -2,8 +2,11 @@ export { default as D } from './D'
export { default as DiagonalDividingLine } from './DiagonalDividingLine'
export { default as Dify } from './Dify'
export { default as Github } from './Github'
export { default as Highlight } from './Highlight'
export { default as Line3 } from './Line3'
export { default as Lock } from './Lock'
export { default as MessageChatSquare } from './MessageChatSquare'
export { default as MultiPathRetrieval } from './MultiPathRetrieval'
export { default as NTo1Retrieval } from './NTo1Retrieval'
export { default as Notion } from './Notion'
export { default as SparklesSoft } from './SparklesSoft'
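
The barrel file gains exports for the three icons added in this compare (`Highlight`, `Lock`, `SparklesSoft`), so they can be imported by name alongside the existing public/common icons. A usage sketch (the import path is inferred from the `Lock.json`/`Lock.tsx` locations above, so treat it as an assumption):

```tsx
// Illustrative usage; path inferred from the new files' location in the diff.
import { Highlight, Lock, SparklesSoft } from '@/app/components/base/icons/src/public/common'

const PlanBadge = () => (
  <span className="flex items-center gap-1">
    <SparklesSoft />
    Upgrade
    <Lock />
    <Highlight />
  </span>
)

export default PlanBadge
```
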
Some files were not shown because too many files have changed in this diff.