Merge branch 'fix/clean-unallowed-special-character' into deploy/dev

Commit: 79dea77a4e

README.md (153 lines changed):
</p>

## Table of Contents

0. [Quick-Start🚀](https://github.com/langgenius/dify?tab=readme-ov-file#quick-start)
1. [Intro📖](https://github.com/langgenius/dify?tab=readme-ov-file#intro)
2. [How to use🔧](https://github.com/langgenius/dify?tab=readme-ov-file#using-dify)
3. [Stay Ahead🏃](https://github.com/langgenius/dify?tab=readme-ov-file#staying-ahead)
4. [Next Steps🏹](https://github.com/langgenius/dify?tab=readme-ov-file#next-steps)
5. [Contributing💪](https://github.com/langgenius/dify?tab=readme-ov-file#contributing)
6. [Community and Contact🏠](https://github.com/langgenius/dify?tab=readme-ov-file#community--contact)
7. [Star-History📈](https://github.com/langgenius/dify?tab=readme-ov-file#star-history)
8. [Security🔒](https://github.com/langgenius/dify?tab=readme-ov-file#security-disclosure)
9. [License🤝](https://github.com/langgenius/dify?tab=readme-ov-file#license)

> Make sure you read through this README before you start using Dify😊

## Quick start

The quickest way to deploy Dify locally is to run our [docker-compose.yml](https://github.com/langgenius/dify/blob/main/docker/docker-compose.yaml). Follow the instructions to start in 5 minutes.

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 cores
> - RAM >= 4 GiB
> - Docker and Docker Compose installed

</br>

Run the following command in your terminal to clone the whole repo.

```bash
git clone https://github.com/langgenius/dify.git
```

After cloning, run the following commands one by one.

```bash
cd dify
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process. You will be asked to set up an admin account.

For more info on the quick setup, check [here](https://docs.dify.ai/getting-started/install-self-hosted/docker-compose).

## Intro

Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production. Here's a list of the core features:

</br> </br>
All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.

## Feature comparison

<table style="width: 100%;">
  <tr>
    <th align="center">Feature</th>
    <th align="center">Dify.AI</th>
    <th align="center">LangChain</th>
    <th align="center">Flowise</th>
    <th align="center">OpenAI Assistants API</th>
  </tr>
  <tr>
    <td align="center">Programming Approach</td>
    <td align="center">API + App-oriented</td>
    <td align="center">Python Code</td>
    <td align="center">App-oriented</td>
    <td align="center">API-oriented</td>
  </tr>
  <tr>
    <td align="center">Supported LLMs</td>
    <td align="center">Rich Variety</td>
    <td align="center">Rich Variety</td>
    <td align="center">Rich Variety</td>
    <td align="center">OpenAI-only</td>
  </tr>
  <tr>
    <td align="center">RAG Engine</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">Workflow</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Observability</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Enterprise Features (SSO/Access control)</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Local Deployment</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
</table>

## Using Dify

- **Cloud </br>**
Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

## Quick start

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 cores
> - RAM >= 4 GiB

</br>

The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.

> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)

## Next steps

Go to [quick-start](https://github.com/langgenius/dify?tab=readme-ov-file#quick-start) to set up your Dify instance, or set it up from source code.

#### If you......

If you forget your admin account, you can refer to this [guide](https://docs.dify.ai/getting-started/install-self-hosted/faqs#id-4.-how-to-reset-the-password-of-the-admin-account) to reset the password.

> Use `docker compose up` without `-d` to print logs in your terminal. This can be useful if you have encountered unknown problems when using Dify.
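For example, a minimal sketch of watching the logs (the service name `api` is taken from the docker-compose stack and may differ between versions):

```bash
cd docker
docker compose up          # run in the foreground and stream logs from all services
# or, if the stack is already running in detached mode:
docker compose logs -f api # follow the logs of a single service
```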
If you encounter a system error and would like to get help in GitHub issues, always paste the error logs in your request to speed up the conversation. Go to [Community & contact](https://github.com/langgenius/dify?tab=readme-ov-file#community--contact) for more information.

> Please read the [Dify Documentation](https://docs.dify.ai/) for detailed how-to-use guidance. Most of the potential problems are explained in the docs.

> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)

If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
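As a minimal sketch of that workflow (the variable shown is only an illustration; check the comments in `.env.example` for the names your version actually supports):

```bash
cd docker
cp .env.example .env                                        # first run only
# change any settings you need, e.g. the exposed HTTP port (illustrative variable name)
sed -i 's/^EXPOSE_NGINX_PORT=.*/EXPOSE_NGINX_PORT=8080/' .env
docker compose down
docker compose up -d                                        # recreate containers with the new configuration
```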
If you'd like to configure a highly available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
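A minimal sketch of what such a Helm-based install could look like, assuming one of those community charts (the repository URL, chart name, and release name below are placeholders, not an official Dify chart):

```bash
helm repo add dify-community <community-chart-repo-url>   # placeholder repo URL
helm repo update
helm install dify dify-community/dify --namespace dify --create-namespace
kubectl get pods -n dify                                   # check that the pods come up
```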
At the same time, please consider supporting Dify by sharing it on social media

* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
* Where possible, attach a log when reporting an error, so issues can be resolved faster.

## Star history
@@ -16,6 +16,7 @@
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.couchbase_config import CouchbaseConfig
from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig

@@ -259,5 +260,6 @@ class MiddlewareConfig(
    TidbOnQdrantConfig,
    UpstashConfig,
    OceanBaseVectorConfig,
    BaiduVectorDBConfig,
):
    pass
@@ -17,6 +17,7 @@ from core.errors.error import ProviderTokenNotInitError
from core.llm_generator.llm_generator import LLMGenerator
from core.model_manager import ModelInstance, ModelManager
from core.model_runtime.entities.model_entities import ModelType
from core.rag.cleaner.clean_processor import CleanProcessor
from core.rag.datasource.keyword.keyword_factory import Keyword
from core.rag.docstore.dataset_docstore import DatasetDocumentStore
from core.rag.extractor.entity.extract_setting import ExtractSetting

@@ -597,26 +598,9 @@ class IndexingRunner:
            rules = DatasetProcessRule.AUTOMATIC_RULES
        else:
            rules = json.loads(processing_rule.rules) if processing_rule.rules else {}
        document_text = CleanProcessor.clean(text, rules)

        if "pre_processing_rules" in rules:
            pre_processing_rules = rules["pre_processing_rules"]
            for pre_processing_rule in pre_processing_rules:
                if pre_processing_rule["id"] == "remove_extra_spaces" and pre_processing_rule["enabled"] is True:
                    # Remove extra spaces
                    pattern = r"\n{3,}"
                    text = re.sub(pattern, "\n\n", text)
                    pattern = r"[\t\f\r\x20\u00a0\u1680\u180e\u2000-\u200a\u202f\u205f\u3000]{2,}"
                    text = re.sub(pattern, " ", text)
                elif pre_processing_rule["id"] == "remove_urls_emails" and pre_processing_rule["enabled"] is True:
                    # Remove email
                    pattern = r"([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)"
                    text = re.sub(pattern, "", text)

                    # Remove URL
                    pattern = r"https?://[^\s]+"
                    text = re.sub(pattern, "", text)

        return text
        return document_text

    @staticmethod
    def format_split_text(text):
Binary file not shown (new image, 11 KiB).

New file (3 lines, an SVG icon, 417 B):

<svg width="1200" height="925" viewBox="0 0 1200 925" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M780.152 250.999L907.882 462.174C907.882 462.174 880.925 510.854 867.43 535.21C834.845 594.039 764.171 612.49 710.442 508.333L420.376 0H0L459.926 803.307C552.303 964.663 787.366 964.663 879.743 803.307C989.874 610.952 1089.87 441.97 1200 249.646L1052.28 0H639.519L780.152 250.999Z" fill="#3366FF"/>
</svg>
api/core/model_runtime/model_providers/vessl_ai/llm/llm.py (new file, 83 lines):

from decimal import Decimal

from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.llm_entities import LLMMode
from core.model_runtime.entities.model_entities import (
    AIModelEntity,
    DefaultParameterName,
    FetchFrom,
    ModelPropertyKey,
    ModelType,
    ParameterRule,
    ParameterType,
    PriceConfig,
)
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel


class VesslAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity:
        features = []

        entity = AIModelEntity(
            model=model,
            label=I18nObject(en_US=model),
            model_type=ModelType.LLM,
            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
            features=features,
            model_properties={
                ModelPropertyKey.MODE: credentials.get("mode"),
            },
            parameter_rules=[
                ParameterRule(
                    name=DefaultParameterName.TEMPERATURE.value,
                    label=I18nObject(en_US="Temperature"),
                    type=ParameterType.FLOAT,
                    default=float(credentials.get("temperature", 0.7)),
                    min=0,
                    max=2,
                    precision=2,
                ),
                ParameterRule(
                    name=DefaultParameterName.TOP_P.value,
                    label=I18nObject(en_US="Top P"),
                    type=ParameterType.FLOAT,
                    default=float(credentials.get("top_p", 1)),
                    min=0,
                    max=1,
                    precision=2,
                ),
                ParameterRule(
                    name=DefaultParameterName.TOP_K.value,
                    label=I18nObject(en_US="Top K"),
                    type=ParameterType.INT,
                    default=int(credentials.get("top_k", 50)),
                    min=-2147483647,
                    max=2147483647,
                    precision=0,
                ),
                ParameterRule(
                    name=DefaultParameterName.MAX_TOKENS.value,
                    label=I18nObject(en_US="Max Tokens"),
                    type=ParameterType.INT,
                    default=512,
                    min=1,
                    max=int(credentials.get("max_tokens_to_sample", 4096)),
                ),
            ],
            pricing=PriceConfig(
                input=Decimal(credentials.get("input_price", 0)),
                output=Decimal(credentials.get("output_price", 0)),
                unit=Decimal(credentials.get("unit", 0)),
                currency=credentials.get("currency", "USD"),
            ),
        )

        if credentials["mode"] == "chat":
            entity.model_properties[ModelPropertyKey.MODE] = LLMMode.CHAT.value
        elif credentials["mode"] == "completion":
            entity.model_properties[ModelPropertyKey.MODE] = LLMMode.COMPLETION.value
        else:
            raise ValueError(f"Unknown completion type {credentials['completion_type']}")

        return entity
api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py (new file, 10 lines):

import logging

from core.model_runtime.model_providers.__base.model_provider import ModelProvider

logger = logging.getLogger(__name__)


class VesslAIProvider(ModelProvider):
    def validate_provider_credentials(self, credentials: dict) -> None:
        pass
New file (56 lines), the VESSL AI provider configuration YAML:

provider: vessl_ai
label:
  en_US: vessl_ai
icon_small:
  en_US: icon_s_en.svg
icon_large:
  en_US: icon_l_en.png
background: "#F1EFED"
help:
  title:
    en_US: How to deploy VESSL AI LLM Model Endpoint
  url:
    en_US: https://docs.vessl.ai/guides/get-started/llama3-deployment
supported_model_types:
  - llm
configurate_methods:
  - customizable-model
model_credential_schema:
  model:
    label:
      en_US: Model Name
    placeholder:
      en_US: Enter your model name
  credential_form_schemas:
    - variable: endpoint_url
      label:
        en_US: endpoint url
      type: text-input
      required: true
      placeholder:
        en_US: Enter the url of your endpoint url
    - variable: api_key
      required: true
      label:
        en_US: API Key
      type: secret-input
      placeholder:
        en_US: Enter your VESSL AI secret key
    - variable: mode
      show_on:
        - variable: __model_type
          value: llm
      label:
        en_US: Completion mode
      type: select
      required: false
      default: chat
      placeholder:
        en_US: Select completion mode
      options:
        - value: completion
          label:
            en_US: Completion
        - value: chat
          label:
            en_US: Chat
@@ -115,6 +115,7 @@ class _CommonWenxin:
        "ernie-character-8k-0321": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-char-8k",
        "ernie-4.0-turbo-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-8k",
        "ernie-4.0-turbo-8k-preview": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-8k-preview",
        "ernie-4.0-turbo-128k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-128k",
        "yi_34b_chat": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/yi_34b_chat",
        "embedding-v1": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1",
        "bge-large-en": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/bge_large_en",
New file (40 lines), the ernie-4.0-turbo-128k model manifest:

model: ernie-4.0-turbo-128k
label:
  en_US: Ernie-4.0-turbo-128K
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 131072
parameter_rules:
  - name: temperature
    use_template: temperature
    min: 0.1
    max: 1.0
    default: 0.8
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 2
    max: 4096
  - name: presence_penalty
    use_template: presence_penalty
    default: 1.0
    min: 1.0
    max: 2.0
  - name: frequency_penalty
    use_template: frequency_penalty
  - name: response_format
    use_template: response_format
  - name: disable_search
    label:
      zh_Hans: 禁用搜索
      en_US: Disable Search
    type: boolean
    help:
      zh_Hans: 禁用模型自行进行外部搜索。
      en_US: Disable the model to perform external search.
    required: false
@@ -34,6 +34,8 @@ class RetrievalService:
        reranking_mode: Optional[str] = "reranking_model",
        weights: Optional[dict] = None,
    ):
        if not query:
            return []
        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
        if not dataset:
            return []
@@ -3,11 +3,13 @@ import time
import uuid
from typing import Any

import numpy as np
from pydantic import BaseModel, model_validator
from pymochow import MochowClient
from pymochow.auth.bce_credentials import BceCredentials
from pymochow.configuration import Configuration
from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, TableState
from pymochow.exception import ServerError
from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, ServerErrCode, TableState
from pymochow.model.schema import Field, HNSWParams, Schema, VectorIndex
from pymochow.model.table import AnnSearch, HNSWSearchParams, Partition, Row

@@ -116,6 +118,7 @@ class BaiduVector(BaseVector):
        self._db.table(self._collection_name).delete(filter=f"{key} = '{value}'")

    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        query_vector = [float(val) if isinstance(val, np.float64) else val for val in query_vector]
        anns = AnnSearch(
            vector_field=self.field_vector,
            vector_floats=query_vector,

@@ -149,7 +152,13 @@ class BaiduVector(BaseVector):
        return docs

    def delete(self) -> None:
        self._db.drop_table(table_name=self._collection_name)
        try:
            self._db.drop_table(table_name=self._collection_name)
        except ServerError as e:
            if e.code == ServerErrCode.TABLE_NOT_EXIST:
                pass
            else:
                raise

    def _init_client(self, config) -> MochowClient:
        config = Configuration(credentials=BceCredentials(config.account, config.api_key), endpoint=config.endpoint)

@@ -166,7 +175,14 @@ class BaiduVector(BaseVector):
        if exists:
            return self._client.database(self._client_config.database)
        else:
            return self._client.create_database(database_name=self._client_config.database)
        try:
            self._client.create_database(database_name=self._client_config.database)
        except ServerError as e:
            if e.code == ServerErrCode.DB_ALREADY_EXIST:
                pass
            else:
                raise
        return

    def _table_existed(self) -> bool:
        tables = self._db.list_table()

@@ -175,7 +191,7 @@ class BaiduVector(BaseVector):
    def _create_table(self, dimension: int) -> None:
        # Try to grab distributed lock and create table
        lock_name = "vector_indexing_lock_{}".format(self._collection_name)
        with redis_client.lock(lock_name, timeout=20):
        with redis_client.lock(lock_name, timeout=60):
            table_exist_cache_key = "vector_indexing_{}".format(self._collection_name)
            if redis_client.get(table_exist_cache_key):
                return

@@ -238,15 +254,14 @@ class BaiduVector(BaseVector):
                description="Table for Dify",
            )

            # Wait for table created
            while True:
                time.sleep(1)
                table = self._db.describe_table(self._collection_name)
                if table.state == TableState.NORMAL:
                    break
            redis_client.set(table_exist_cache_key, 1, ex=3600)

        # Wait for table created
        while True:
            time.sleep(1)
            table = self._db.describe_table(self._collection_name)
            if table.state == TableState.NORMAL:
                break


class BaiduVectorFactory(AbstractVectorFactory):
    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> BaiduVector:
@@ -33,7 +33,9 @@ class BarChartTool(BuiltinTool):
        if axis:
            axis = [label[:10] + "..." if len(label) > 10 else label for label in axis]
            ax.set_xticklabels(axis, rotation=45, ha="right")
            ax.bar(axis, data)
            # ensure all labels, including duplicates, are correctly displayed
            ax.bar(range(len(data)), data)
            ax.set_xticks(range(len(data)))
        else:
            ax.bar(range(len(data)), data)
api/poetry.lock (generated; 44 lines changed):

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.

[[package]]
name = "aiohappyeyeballs"
@ -932,10 +932,6 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
|
||||
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
|
||||
|
@ -948,14 +944,8 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
|
||||
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
|
||||
|
@ -966,24 +956,8 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
|
||||
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
|
||||
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
|
||||
|
@ -993,10 +967,6 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
|
||||
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
|
||||
|
@ -1008,10 +978,6 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
|
||||
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
|
||||
|
@ -1024,10 +990,6 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
|
||||
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
|
||||
|
@ -1040,10 +1002,6 @@ files = [
|
|||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
|
||||
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
|
||||
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
|
||||
|
|
@@ -84,5 +84,10 @@ VOLC_EMBEDDING_ENDPOINT_ID=

# 360 AI Credentials
ZHINAO_API_KEY=

# VESSL AI Credentials
VESSL_AI_MODEL_NAME=
VESSL_AI_API_KEY=
VESSL_AI_ENDPOINT_URL=

# Gitee AI Credentials
GITEE_AI_API_KEY=
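The new VESSL AI integration tests read these values from the environment. A sketch of running just those tests locally (assuming the API project's Poetry environment is already set up; placeholder values shown):

```bash
export VESSL_AI_MODEL_NAME=<your-model-name>
export VESSL_AI_API_KEY=<your-vessl-api-key>
export VESSL_AI_ENDPOINT_URL=<your-endpoint-url>
cd api
poetry run pytest tests/integration_tests/model_runtime/vessl_ai/test_llm.py
```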
131
api/tests/integration_tests/model_runtime/vessl_ai/test_llm.py
Normal file
131
api/tests/integration_tests/model_runtime/vessl_ai/test_llm.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
import os
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
|
||||
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
|
||||
from core.model_runtime.entities.message_entities import (
|
||||
AssistantPromptMessage,
|
||||
SystemPromptMessage,
|
||||
UserPromptMessage,
|
||||
)
|
||||
from core.model_runtime.errors.validate import CredentialsValidateFailedError
|
||||
from core.model_runtime.model_providers.vessl_ai.llm.llm import VesslAILargeLanguageModel
|
||||
|
||||
|
||||
def test_validate_credentials():
|
||||
model = VesslAILargeLanguageModel()
|
||||
|
||||
with pytest.raises(CredentialsValidateFailedError):
|
||||
model.validate_credentials(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": "invalid_key",
|
||||
"endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"),
|
||||
"mode": "chat",
|
||||
},
|
||||
)
|
||||
|
||||
with pytest.raises(CredentialsValidateFailedError):
|
||||
model.validate_credentials(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": os.environ.get("VESSL_AI_API_KEY"),
|
||||
"endpoint_url": "http://invalid_url",
|
||||
"mode": "chat",
|
||||
},
|
||||
)
|
||||
|
||||
model.validate_credentials(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": os.environ.get("VESSL_AI_API_KEY"),
|
||||
"endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"),
|
||||
"mode": "chat",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def test_invoke_model():
|
||||
model = VesslAILargeLanguageModel()
|
||||
|
||||
response = model.invoke(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": os.environ.get("VESSL_AI_API_KEY"),
|
||||
"endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"),
|
||||
"mode": "chat",
|
||||
},
|
||||
prompt_messages=[
|
||||
SystemPromptMessage(
|
||||
content="You are a helpful AI assistant.",
|
||||
),
|
||||
UserPromptMessage(content="Who are you?"),
|
||||
],
|
||||
model_parameters={
|
||||
"temperature": 1.0,
|
||||
"top_k": 2,
|
||||
"top_p": 0.5,
|
||||
},
|
||||
stop=["How"],
|
||||
stream=False,
|
||||
user="abc-123",
|
||||
)
|
||||
|
||||
assert isinstance(response, LLMResult)
|
||||
assert len(response.message.content) > 0
|
||||
|
||||
|
||||
def test_invoke_stream_model():
|
||||
model = VesslAILargeLanguageModel()
|
||||
|
||||
response = model.invoke(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": os.environ.get("VESSL_AI_API_KEY"),
|
||||
"endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"),
|
||||
"mode": "chat",
|
||||
},
|
||||
prompt_messages=[
|
||||
SystemPromptMessage(
|
||||
content="You are a helpful AI assistant.",
|
||||
),
|
||||
UserPromptMessage(content="Who are you?"),
|
||||
],
|
||||
model_parameters={
|
||||
"temperature": 1.0,
|
||||
"top_k": 2,
|
||||
"top_p": 0.5,
|
||||
},
|
||||
stop=["How"],
|
||||
stream=True,
|
||||
user="abc-123",
|
||||
)
|
||||
|
||||
assert isinstance(response, Generator)
|
||||
|
||||
for chunk in response:
|
||||
assert isinstance(chunk, LLMResultChunk)
|
||||
assert isinstance(chunk.delta, LLMResultChunkDelta)
|
||||
assert isinstance(chunk.delta.message, AssistantPromptMessage)
|
||||
|
||||
|
||||
def test_get_num_tokens():
|
||||
model = VesslAILargeLanguageModel()
|
||||
|
||||
num_tokens = model.get_num_tokens(
|
||||
model=os.environ.get("VESSL_AI_MODEL_NAME"),
|
||||
credentials={
|
||||
"api_key": os.environ.get("VESSL_AI_API_KEY"),
|
||||
"endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"),
|
||||
},
|
||||
prompt_messages=[
|
||||
SystemPromptMessage(
|
||||
content="You are a helpful AI assistant.",
|
||||
),
|
||||
UserPromptMessage(content="Hello World!"),
|
||||
],
|
||||
)
|
||||
|
||||
assert isinstance(num_tokens, int)
|
||||
assert num_tokens == 21
|
|
@@ -31,6 +31,7 @@ export type Props = {
  noWrapper?: boolean
  isExpand?: boolean
  showFileList?: boolean
  onGenerated?: (value: string) => void
  showCodeGenerator?: boolean
}

@@ -64,6 +65,7 @@ const CodeEditor: FC<Props> = ({
  noWrapper,
  isExpand,
  showFileList,
  onGenerated,
  showCodeGenerator = false,
}) => {
  const [isFocus, setIsFocus] = React.useState(false)

@@ -151,9 +153,6 @@ const CodeEditor: FC<Props> = ({
    return isFocus ? 'focus-theme' : 'blur-theme'
  })()
  const handleGenerated = (code: string) => {
    handleEditorChange(code)
  }

  const main = (
    <>

@@ -205,7 +204,7 @@ const CodeEditor: FC<Props> = ({
          isFocus={isFocus && !readOnly}
          minHeight={minHeight}
          isInNode={isInNode}
          onGenerated={handleGenerated}
          onGenerated={onGenerated}
          codeLanguages={language}
          fileList={fileList}
          showFileList={showFileList}
326
web/app/components/workflow/nodes/code/code-parser.spec.ts
Normal file
326
web/app/components/workflow/nodes/code/code-parser.spec.ts
Normal file
|
@ -0,0 +1,326 @@
|
|||
import { VarType } from '../../types'
|
||||
import { extractFunctionParams, extractReturnType } from './code-parser'
|
||||
import { CodeLanguage } from './types'
|
||||
|
||||
const SAMPLE_CODES = {
|
||||
python3: {
|
||||
noParams: 'def main():',
|
||||
singleParam: 'def main(param1):',
|
||||
multipleParams: `def main(param1, param2, param3):
|
||||
return {"result": param1}`,
|
||||
withTypes: `def main(param1: str, param2: int, param3: List[str]):
|
||||
result = process_data(param1, param2)
|
||||
return {"output": result}`,
|
||||
withDefaults: `def main(param1: str = "default", param2: int = 0):
|
||||
return {"data": param1}`,
|
||||
},
|
||||
javascript: {
|
||||
noParams: 'function main() {',
|
||||
singleParam: 'function main(param1) {',
|
||||
multipleParams: `function main(param1, param2, param3) {
|
||||
return { result: param1 }
|
||||
}`,
|
||||
withComments: `// Main function
|
||||
function main(param1, param2) {
|
||||
// Process data
|
||||
return { output: process(param1, param2) }
|
||||
}`,
|
||||
withSpaces: 'function main( param1 , param2 ) {',
|
||||
},
|
||||
}
|
||||
|
||||
describe('extractFunctionParams', () => {
|
||||
describe('Python3', () => {
|
||||
test('handles no parameters', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.python3.noParams, CodeLanguage.python3)
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
|
||||
test('extracts single parameter', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.python3.singleParam, CodeLanguage.python3)
|
||||
expect(result).toEqual(['param1'])
|
||||
})
|
||||
|
||||
test('extracts multiple parameters', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.python3.multipleParams, CodeLanguage.python3)
|
||||
expect(result).toEqual(['param1', 'param2', 'param3'])
|
||||
})
|
||||
|
||||
test('handles type hints', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.python3.withTypes, CodeLanguage.python3)
|
||||
expect(result).toEqual(['param1', 'param2', 'param3'])
|
||||
})
|
||||
|
||||
test('handles default values', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.python3.withDefaults, CodeLanguage.python3)
|
||||
expect(result).toEqual(['param1', 'param2'])
|
||||
})
|
||||
})
|
||||
|
||||
// JavaScriptのテストケース
|
||||
describe('JavaScript', () => {
|
||||
test('handles no parameters', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.javascript.noParams, CodeLanguage.javascript)
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
|
||||
test('extracts single parameter', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.javascript.singleParam, CodeLanguage.javascript)
|
||||
expect(result).toEqual(['param1'])
|
||||
})
|
||||
|
||||
test('extracts multiple parameters', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.javascript.multipleParams, CodeLanguage.javascript)
|
||||
expect(result).toEqual(['param1', 'param2', 'param3'])
|
||||
})
|
||||
|
||||
test('handles comments in code', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.javascript.withComments, CodeLanguage.javascript)
|
||||
expect(result).toEqual(['param1', 'param2'])
|
||||
})
|
||||
|
||||
test('handles whitespace', () => {
|
||||
const result = extractFunctionParams(SAMPLE_CODES.javascript.withSpaces, CodeLanguage.javascript)
|
||||
expect(result).toEqual(['param1', 'param2'])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const RETURN_TYPE_SAMPLES = {
|
||||
python3: {
|
||||
singleReturn: `
|
||||
def main(param1):
|
||||
return {"result": "value"}`,
|
||||
|
||||
multipleReturns: `
|
||||
def main(param1, param2):
|
||||
return {"result": "value", "status": "success"}`,
|
||||
|
||||
noReturn: `
|
||||
def main():
|
||||
print("Hello")`,
|
||||
|
||||
complexReturn: `
|
||||
def main():
|
||||
data = process()
|
||||
return {"result": data, "count": 42, "messages": ["hello"]}`,
|
||||
nestedObject: `
|
||||
def main(name, age, city):
|
||||
return {
|
||||
'personal_info': {
|
||||
'name': name,
|
||||
'age': age,
|
||||
'city': city
|
||||
},
|
||||
'timestamp': int(time.time()),
|
||||
'status': 'active'
|
||||
}`,
|
||||
},
|
||||
|
||||
javascript: {
|
||||
singleReturn: `
|
||||
function main(param1) {
|
||||
return { result: "value" }
|
||||
}`,
|
||||
|
||||
multipleReturns: `
|
||||
function main(param1) {
|
||||
return { result: "value", status: "success" }
|
||||
}`,
|
||||
|
||||
withParentheses: `
|
||||
function main() {
|
||||
return ({ result: "value", status: "success" })
|
||||
}`,
|
||||
|
||||
noReturn: `
|
||||
function main() {
|
||||
console.log("Hello")
|
||||
}`,
|
||||
|
||||
withQuotes: `
|
||||
function main() {
|
||||
return { "result": 'value', 'status': "success" }
|
||||
}`,
|
||||
nestedObject: `
|
||||
function main(name, age, city) {
|
||||
return {
|
||||
personal_info: {
|
||||
name: name,
|
||||
age: age,
|
||||
city: city
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
status: 'active'
|
||||
}
|
||||
}`,
|
||||
withJSDoc: `
|
||||
/**
|
||||
* Creates a user profile with personal information and metadata
|
||||
* @param {string} name - The user's name
|
||||
* @param {number} age - The user's age
|
||||
* @param {string} city - The user's city of residence
|
||||
* @returns {Object} An object containing the user profile
|
||||
*/
|
||||
function main(name, age, city) {
|
||||
return {
|
||||
result: {
|
||||
personal_info: {
|
||||
name: name,
|
||||
age: age,
|
||||
city: city
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
status: 'active'
|
||||
}
|
||||
};
|
||||
}`,
|
||||
|
||||
},
|
||||
}
|
||||
|
||||
describe('extractReturnType', () => {
|
||||
// Python3のテスト
|
||||
describe('Python3', () => {
|
||||
test('extracts single return value', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.python3.singleReturn, CodeLanguage.python3)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test('extracts multiple return values', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.python3.multipleReturns, CodeLanguage.python3)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test('returns empty object when no return statement', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.python3.noReturn, CodeLanguage.python3)
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
test('handles complex return statement', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.python3.complexReturn, CodeLanguage.python3)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
count: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
messages: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
test('handles nested object structure', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.python3.nestedObject, CodeLanguage.python3)
|
||||
expect(result).toEqual({
|
||||
personal_info: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
timestamp: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// JavaScriptのテスト
|
||||
describe('JavaScript', () => {
|
||||
test('extracts single return value', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.singleReturn, CodeLanguage.javascript)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test('extracts multiple return values', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.multipleReturns, CodeLanguage.javascript)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test('handles return with parentheses', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.withParentheses, CodeLanguage.javascript)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test('returns empty object when no return statement', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.noReturn, CodeLanguage.javascript)
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
test('handles quoted keys', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.withQuotes, CodeLanguage.javascript)
|
||||
expect(result).toEqual({
|
||||
result: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
test('handles nested object structure', () => {
|
||||
const result = extractReturnType(RETURN_TYPE_SAMPLES.javascript.nestedObject, CodeLanguage.javascript)
|
||||
expect(result).toEqual({
|
||||
personal_info: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
timestamp: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
status: {
|
||||
type: VarType.string,
|
||||
children: null,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
web/app/components/workflow/nodes/code/code-parser.ts (new file, 86 lines):

import { VarType } from '../../types'
import type { OutputVar } from './types'
import { CodeLanguage } from './types'

export const extractFunctionParams = (code: string, language: CodeLanguage) => {
  if (language === CodeLanguage.json)
    return []

  const patterns: Record<Exclude<CodeLanguage, CodeLanguage.json>, RegExp> = {
    [CodeLanguage.python3]: /def\s+main\s*\((.*?)\)/,
    [CodeLanguage.javascript]: /function\s+main\s*\((.*?)\)/,
  }
  const match = code.match(patterns[language])
  const params: string[] = []

  if (match?.[1]) {
    params.push(...match[1].split(',')
      .map(p => p.trim())
      .filter(Boolean)
      .map(p => p.split(':')[0].trim()),
    )
  }

  return params
}
export const extractReturnType = (code: string, language: CodeLanguage): OutputVar => {
  const codeWithoutComments = code.replace(/\/\*\*[\s\S]*?\*\//, '')
  console.log(codeWithoutComments)

  const returnIndex = codeWithoutComments.indexOf('return')
  if (returnIndex === -1)
    return {}

  // Take the substring starting at "return"
  const codeAfterReturn = codeWithoutComments.slice(returnIndex)

  let bracketCount = 0
  let startIndex = codeAfterReturn.indexOf('{')

  if (language === CodeLanguage.javascript && startIndex === -1) {
    const parenStart = codeAfterReturn.indexOf('(')
    if (parenStart !== -1)
      startIndex = codeAfterReturn.indexOf('{', parenStart)
  }

  if (startIndex === -1)
    return {}

  let endIndex = -1

  for (let i = startIndex; i < codeAfterReturn.length; i++) {
    if (codeAfterReturn[i] === '{')
      bracketCount++
    if (codeAfterReturn[i] === '}') {
      bracketCount--
      if (bracketCount === 0) {
        endIndex = i + 1
        break
      }
    }
  }

  if (endIndex === -1)
    return {}

  const returnContent = codeAfterReturn.slice(startIndex + 1, endIndex - 1)
  console.log(returnContent)

  const result: OutputVar = {}

  const keyRegex = /['"]?(\w+)['"]?\s*:(?![^{]*})/g
  const matches = returnContent.matchAll(keyRegex)

  for (const match of matches) {
    console.log(`Found key: "${match[1]}" from match: "${match[0]}"`)
    const key = match[1]
    result[key] = {
      type: VarType.string,
      children: null,
    }
  }

  console.log(result)

  return result
}
@@ -5,6 +5,7 @@
import useConfig from './use-config'
import type { CodeNodeType } from './types'
import { CodeLanguage } from './types'
import { extractFunctionParams, extractReturnType } from './code-parser'
import VarList from '@/app/components/workflow/nodes/_base/components/variable/var-list'
import OutputVarList from '@/app/components/workflow/nodes/_base/components/variable/output-var-list'
import AddButton from '@/app/components/base/button/add-button'

@@ -12,10 +13,9 @@ import Field from '@/app/components/workflow/nodes/_base/components/field'
import Split from '@/app/components/workflow/nodes/_base/components/split'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import TypeSelector from '@/app/components/workflow/nodes/_base/components/selector'
import type { NodePanelProps } from '@/app/components/workflow/types'
import { type NodePanelProps } from '@/app/components/workflow/types'
import BeforeRunForm from '@/app/components/workflow/nodes/_base/components/before-run-form'
import ResultPanel from '@/app/components/workflow/run/result-panel'

const i18nPrefix = 'workflow.nodes.code'

const codeLanguages = [

@@ -38,6 +38,7 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
  readOnly,
  inputs,
  outputKeyOrders,
  handleCodeAndVarsChange,
  handleVarListChange,
  handleAddVariable,
  handleRemoveVariable,

@@ -61,6 +62,18 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
    setInputVarValues,
  } = useConfig(id, data)

  const handleGeneratedCode = (value: string) => {
    const params = extractFunctionParams(value, inputs.code_language)
    const codeNewInput = params.map((p) => {
      return {
        variable: p,
        value_selector: [],
      }
    })
    const returnTypes = extractReturnType(value, inputs.code_language)
    handleCodeAndVarsChange(value, codeNewInput, returnTypes)
  }

  return (
    <div className='mt-2'>
      <div className='px-4 pb-4 space-y-4'>

@@ -92,6 +105,7 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
            language={inputs.code_language}
            value={inputs.code}
            onChange={handleCodeChange}
            onGenerated={handleGeneratedCode}
            showCodeGenerator={true}
          />
        </div>
@@ -3,7 +3,7 @@ import produce from 'immer'
import useVarList from '../_base/hooks/use-var-list'
import useOutputVarList from '../_base/hooks/use-output-var-list'
import { BlockEnum, VarType } from '../../types'
import type { Var } from '../../types'
import type { Var, Variable } from '../../types'
import { useStore } from '../../store'
import type { CodeNodeType, OutputVar } from './types'
import { CodeLanguage } from './types'

@@ -136,7 +136,15 @@ const useConfig = (id: string, payload: CodeNodeType) => {
  const setInputVarValues = useCallback((newPayload: Record<string, any>) => {
    setRunInputData(newPayload)
  }, [setRunInputData])

  const handleCodeAndVarsChange = useCallback((code: string, inputVariables: Variable[], outputVariables: OutputVar) => {
    const newInputs = produce(inputs, (draft) => {
      draft.code = code
      draft.variables = inputVariables
      draft.outputs = outputVariables
    })
    setInputs(newInputs)
    syncOutputKeyOrders(outputVariables)
  }, [inputs, setInputs, syncOutputKeyOrders])
  return {
    readOnly,
    inputs,

@@ -163,6 +171,7 @@ const useConfig = (id: string, payload: CodeNodeType) => {
    inputVarValues,
    setInputVarValues,
    runResult,
    handleCodeAndVarsChange,
  }
}