Mirror of https://github.com/langgenius/dify.git
Synced 2024-11-16 11:42:29 +08:00
Merge branch 'deploy/dev' of github.com:langgenius/dify into deploy/dev
Some checks failed
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/amd64, build-api-amd64) (push) Has been cancelled
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/arm64, build-api-arm64) (push) Has been cancelled
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/amd64, build-web-amd64) (push) Has been cancelled
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/arm64, build-web-arm64) (push) Has been cancelled
Build and Push API & Web / create-manifest (api, DIFY_API_IMAGE_NAME, merge-api-images) (push) Has been cancelled
Build and Push API & Web / create-manifest (web, DIFY_WEB_IMAGE_NAME, merge-web-images) (push) Has been cancelled
This commit is contained in:
commit 8cc63ffd65
@@ -1,4 +1,5 @@
 import os
+import sys
 
 from configs import dify_config
 
@@ -29,6 +30,9 @@ from models import account, dataset, model, source, task, tool, tools, web # no
 
 # DO NOT REMOVE ABOVE
 
+if sys.version_info[:2] == (3, 10):
+    print("Warning: Python 3.10 will not be supported in the next version.")
+
 
 warnings.simplefilter("ignore", ResourceWarning)
 
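The new guard keys off only the interpreter's major and minor version, so every 3.10.x patch release triggers the deprecation warning. A minimal sketch of the slicing behaviour (printed values are illustrative):

import sys

# sys.version_info is a named tuple such as (3, 10, 14, "final", 0);
# slicing keeps just (major, minor), so any 3.10.x build matches.
print(sys.version_info[:2])             # e.g. (3, 10)
print(sys.version_info[:2] == (3, 10))  # True on any 3.10.x interpreter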
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
 
     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="0.11.0",
+        default="0.11.1",
     )
 
     COMMIT_SHA: str = Field(
@@ -617,6 +617,10 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
         # o1 compatibility
         block_as_stream = False
         if model.startswith("o1"):
+            if "max_tokens" in model_parameters:
+                model_parameters["max_completion_tokens"] = model_parameters["max_tokens"]
+                del model_parameters["max_tokens"]
+
             if stream:
                 block_as_stream = True
                 stream = False
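OpenAI's o1-series models accept max_completion_tokens rather than the older max_tokens, so the hunk remaps the key before the request is assembled; the surrounding code also downgrades stream=True to a blocking call that is replayed as a stream (block_as_stream), since these models do not stream. The same remapping as a standalone sketch (the sample parameters are made up):

def adapt_o1_parameters(model: str, model_parameters: dict) -> dict:
    # o1 models reject max_tokens; carry the value over to the
    # accepted max_completion_tokens key and drop the old one.
    if model.startswith("o1") and "max_tokens" in model_parameters:
        model_parameters["max_completion_tokens"] = model_parameters.pop("max_tokens")
    return model_parameters

print(adapt_o1_parameters("o1-mini", {"max_tokens": 512}))
# {'max_completion_tokens': 512}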
@@ -50,9 +50,9 @@ class WordExtractor(BaseExtractor):
 
             self.web_path = self.file_path
             # TODO: use a better way to handle the file
-            with tempfile.NamedTemporaryFile(delete=False) as self.temp_file:
+            self.temp_file = tempfile.NamedTemporaryFile()  # noqa: SIM115
             self.temp_file.write(r.content)
             self.file_path = self.temp_file.name
         elif not os.path.isfile(self.file_path):
             raise ValueError(f"File path {self.file_path} is not a valid file or url")
 
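The old code wrote through a `with` block with delete=False, leaving an orphaned file on disk once the handle closed; the replacement keeps an open NamedTemporaryFile on the instance, so the default delete-on-close cleanup applies when the extractor is done (the # noqa: SIM115 silences the lint rule that would otherwise demand a context manager). A small sketch of the two lifetimes:

import tempfile

# Closed on block exit; with the default delete=True the file is
# removed as well, so the saved path is immediately stale.
with tempfile.NamedTemporaryFile() as f:
    stale_path = f.name
print(stale_path)  # path is usually gone once the block exits

# An open handle keeps the file readable until close() or GC.
handle = tempfile.NamedTemporaryFile()  # noqa: SIM115 - deliberate, as in the hunk
handle.write(b"downloaded bytes")
handle.flush()
print(handle.name)  # usable while `handle` stays open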
@@ -1,6 +1,6 @@
 from typing import Literal, Optional
 
-from pydantic import BaseModel
+from pydantic import BaseModel, Field, field_validator
 
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.tools.entities.common_entities import I18nObject
@@ -32,9 +32,14 @@ class UserToolProvider(BaseModel):
     original_credentials: Optional[dict] = None
     is_team_authorization: bool = False
     allow_delete: bool = True
-    tools: list[UserTool] | None = None
+    tools: list[UserTool] = Field(default_factory=list)
     labels: list[str] | None = None
 
+    @field_validator("tools", mode="before")
+    @classmethod
+    def convert_none_to_empty_list(cls, v):
+        return v if v is not None else []
+
     def to_dict(self) -> dict:
         # -------------
         # overwrite tool parameter types for temp fix
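With mode="before" the validator runs on the raw input prior to type coercion, so an explicit None is normalised to [] instead of failing the list[UserTool] check, and default_factory gives each instance its own fresh list. A self-contained sketch of the same pattern (the Container model is hypothetical):

from pydantic import BaseModel, Field, field_validator

class Container(BaseModel):
    # default_factory avoids sharing one mutable list between instances
    items: list[str] = Field(default_factory=list)

    @field_validator("items", mode="before")
    @classmethod
    def convert_none_to_empty_list(cls, v):
        # Runs before pydantic coerces the value, so None never reaches
        # the list[str] validation step.
        return v if v is not None else []

print(Container(items=None).items)  # []
print(Container().items)            # []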
@@ -5,7 +5,7 @@ identity:
     en_US: Gitee AI
     zh_Hans: Gitee AI
   description:
-    en_US: 快速体验大模型,领先探索 AI 开源世界
+    en_US: Quickly experience large models and explore the leading AI open source world
     zh_Hans: 快速体验大模型,领先探索 AI 开源世界
   icon: icon.svg
 tags:
@@ -32,7 +32,7 @@ parameters:
       en_US: RAG Model for your database DDL
       zh_Hans: 存储数据库训练数据的RAG模型
     llm_description: RAG Model for generating SQL
-    form: form
+    form: llm
   - name: db_type
     type: select
     required: true
@@ -136,7 +136,7 @@ parameters:
     human_description:
       en_US: DDL statements for training data
       zh_Hans: 用于训练RAG Model的建表语句
-    form: form
+    form: llm
   - name: question
     type: string
     required: false
@@ -146,7 +146,7 @@ parameters:
     human_description:
       en_US: Question-SQL Pairs
       zh_Hans: Question-SQL中的问题
-    form: form
+    form: llm
   - name: sql
     type: string
     required: false
@@ -156,7 +156,7 @@ parameters:
     human_description:
       en_US: SQL queries to your training data
       zh_Hans: 用于训练RAG Model的SQL语句
-    form: form
+    form: llm
   - name: memos
     type: string
     required: false
@@ -166,7 +166,7 @@ parameters:
     human_description:
      en_US: Sometimes you may want to add documentation about your business terminology or definitions
      zh_Hans: 添加更多关于数据库的业务说明
-    form: form
+    form: llm
   - name: enable_training
     type: boolean
     required: false
@@ -555,6 +555,7 @@ class ToolManager:
         """
         get tool provider
         """
+        provider_name = provider
         provider: ApiToolProvider = (
             db.session.query(ApiToolProvider)
             .filter(
@@ -565,7 +566,7 @@ class ToolManager:
         )
 
         if provider is None:
-            raise ValueError(f"you have not added provider {provider}")
+            raise ValueError(f"you have not added provider {provider_name}")
 
         try:
             credentials = json.loads(provider.credentials_str) or {}
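The one-line alias fixes a shadowing bug: the `provider` parameter is immediately rebound to the query result, so on a failed lookup the old f-string interpolated None instead of the name the caller asked for. The failure mode in miniature (`lookup` is a stand-in for the DB query):

def lookup(name):
    # stand-in for db.session.query(...).first(); misses return None
    return None

def get_provider(provider: str):
    provider_name = provider    # keep the caller's string before rebinding
    provider = lookup(provider)
    if provider is None:
        # Without the alias this message would read "... provider None".
        raise ValueError(f"you have not added provider {provider_name}")
    return provider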
@@ -356,3 +356,19 @@ def content_digest(element):
         digest.update(child.encode("utf-8"))
     digest = digest.hexdigest()
     return digest
+
+
+def get_image_upload_file_ids(content):
+    pattern = r"!\[image\]\((http?://.*?(file-preview|image-preview))\)"
+    matches = re.findall(pattern, content)
+    image_upload_file_ids = []
+    for match in matches:
+        if match[1] == "file-preview":
+            content_pattern = r"files/([^/]+)/file-preview"
+        else:
+            content_pattern = r"files/([^/]+)/image-preview"
+        content_match = re.search(content_pattern, match[0])
+        if content_match:
+            image_upload_file_id = content_match.group(1)
+            image_upload_file_ids.append(image_upload_file_id)
+    return image_upload_file_ids
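The new helper extracts upload-file IDs from markdown image links so the cleanup tasks further down can delete the underlying blobs. Worth noting: `http?://` makes only the final `p` optional, so the pattern matches `http://` (and `htt://`) but not `https://`. A usage sketch with a made-up link, reproducing the same two-step matching so it runs standalone:

import re

content = "![image](http://example.com/files/1234-abcd/image-preview)"

link = re.search(r"!\[image\]\((http?://.*?(file-preview|image-preview))\)", content)
assert link is not None
# First group is the full URL, second is the preview suffix; a second
# search pulls the path segment in front of that suffix.
file_id = re.search(r"files/([^/]+)/" + link.group(2), link.group(1)).group(1)
print(file_id)  # 1234-abcd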
@@ -489,7 +489,10 @@ class IterationNode(BaseNode[IterationNodeData]):
             )
             yield metadata_event
 
-            current_iteration_output = variable_pool.get(self.node_data.output_selector).value
+            current_output_segment = variable_pool.get(self.node_data.output_selector)
+            if current_output_segment is None:
+                raise IterationNodeError("iteration output selector not found")
+            current_iteration_output = current_output_segment.value
             outputs[current_index] = current_iteration_output
             # remove all nodes outputs from variable pool
             for node_id in iteration_graph.node_ids:
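Before the change, a missing selector meant variable_pool.get(...) returned None and the chained .value raised a bare AttributeError; splitting the lookup lets the node fail with a domain-specific error instead. The pattern in isolation (the pool and segment types are stand-ins):

from types import SimpleNamespace

class IterationNodeError(Exception):
    """Stand-in for the node's own error type."""

def read_output(pool: dict, selector: str):
    segment = pool.get(selector)
    if segment is None:
        # Fail with a descriptive, catchable error rather than an
        # AttributeError from the follow-up `.value` access.
        raise IterationNodeError("iteration output selector not found")
    return segment.value

pool = {"out": SimpleNamespace(value=42)}
print(read_output(pool, "out"))  # 42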
@@ -59,4 +59,4 @@ class ListOperatorNodeData(BaseNodeData):
     filter_by: FilterBy
     order_by: OrderBy
     limit: Limit
-    extract_by: ExtractConfig
+    extract_by: ExtractConfig = Field(default_factory=ExtractConfig)
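Giving extract_by a default keeps older serialized node data loadable: payloads written before the field existed no longer fail validation, and default_factory builds a fresh ExtractConfig per instance rather than sharing one mutable default. A sketch under those assumptions (the `enabled` field is hypothetical):

from pydantic import BaseModel, Field

class ExtractConfig(BaseModel):
    enabled: bool = False  # hypothetical field, for the sketch only

class NodeData(BaseModel):
    extract_by: ExtractConfig = Field(default_factory=ExtractConfig)

# Old payloads without the key now validate instead of raising.
print(NodeData.model_validate({}).extract_by)           # enabled=False
# Each instance gets its own config object, not a shared default.
print(NodeData().extract_by is NodeData().extract_by)   # False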
@@ -46,7 +46,6 @@ def init_app(app: Flask) -> Celery:
         broker_connection_retry_on_startup=True,
         worker_log_format=dify_config.LOG_FORMAT,
         worker_task_log_format=dify_config.LOG_FORMAT,
-        worker_logfile=dify_config.LOG_FILE,
         worker_hijack_root_logger=False,
         timezone=pytz.timezone(dify_config.LOG_TZ),
     )
@@ -56,6 +55,11 @@ def init_app(app: Flask) -> Celery:
         broker_use_ssl=ssl_options,  # Add the SSL options to the broker configuration
     )
 
+    if dify_config.LOG_FILE:
+        celery_app.conf.update(
+            worker_logfile=dify_config.LOG_FILE,
+        )
+
     celery_app.set_default()
     app.extensions["celery"] = celery_app
 
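Taken together, the two hunks stop forwarding LOG_FILE to Celery unconditionally: worker_logfile is now applied in a follow-up conf.update only when a path is actually configured, so deployments without a log file no longer inject an empty value into the option. A sketch of the guarded-update shape (LOG_FILE stands in for dify_config.LOG_FILE):

from celery import Celery

LOG_FILE = None  # placeholder: unset in many deployments

celery_app = Celery("dify")
celery_app.conf.update(worker_hijack_root_logger=False)

# Only forward the option when there is a real path to log to.
if LOG_FILE:
    celery_app.conf.update(worker_logfile=LOG_FILE)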
@@ -9,19 +9,21 @@ from configs import dify_config
 
 
 def init_app(app: Flask):
-    log_handlers = None
+    log_handlers = []
     log_file = dify_config.LOG_FILE
     if log_file:
         log_dir = os.path.dirname(log_file)
         os.makedirs(log_dir, exist_ok=True)
-        log_handlers = [
-            RotatingFileHandler(
-                filename=log_file,
-                maxBytes=dify_config.LOG_FILE_MAX_SIZE * 1024 * 1024,
-                backupCount=dify_config.LOG_FILE_BACKUP_COUNT,
-            ),
-            logging.StreamHandler(sys.stdout),
-        ]
+        log_handlers.append(
+            RotatingFileHandler(
+                filename=log_file,
+                maxBytes=dify_config.LOG_FILE_MAX_SIZE * 1024 * 1024,
+                backupCount=dify_config.LOG_FILE_BACKUP_COUNT,
+            )
+        )
+
+    # Always add StreamHandler to log to console
+    log_handlers.append(logging.StreamHandler(sys.stdout))
 
     logging.basicConfig(
         level=dify_config.LOG_LEVEL,
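The rewrite guarantees a console handler in every configuration: previously, with no LOG_FILE set, log_handlers stayed None and basicConfig fell back to its default stderr handler; now stdout logging is unconditional and the rotating file handler is appended only when a path exists. A compact sketch of the resulting setup (sizes and level are placeholders):

import logging
import sys
from logging.handlers import RotatingFileHandler

LOG_FILE = ""  # placeholder: empty means console-only

log_handlers = []
if LOG_FILE:
    log_handlers.append(
        RotatingFileHandler(filename=LOG_FILE, maxBytes=20 * 1024 * 1024, backupCount=5)
    )
# Always log to stdout, whether or not a file is configured.
log_handlers.append(logging.StreamHandler(sys.stdout))

logging.basicConfig(level=logging.INFO, handlers=log_handlers)
logging.getLogger(__name__).info("logging initialised")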
@@ -113,8 +113,10 @@ class ApiToolManageService:
         if schema_type not in [member.value for member in ApiProviderSchemaType]:
             raise ValueError(f"invalid schema type {schema}")
 
+        provider_name = provider_name.strip()
+
         # check if the provider exists
-        provider: ApiToolProvider = (
+        provider = (
             db.session.query(ApiToolProvider)
             .filter(
                 ApiToolProvider.tenant_id == tenant_id,
@@ -199,21 +201,21 @@ class ApiToolManageService:
         return {"schema": schema}
 
     @staticmethod
-    def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider: str) -> list[UserTool]:
+    def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: str) -> list[UserTool]:
         """
         list api tool provider tools
         """
-        provider: ApiToolProvider = (
+        provider = (
             db.session.query(ApiToolProvider)
             .filter(
                 ApiToolProvider.tenant_id == tenant_id,
-                ApiToolProvider.name == provider,
+                ApiToolProvider.name == provider_name,
             )
             .first()
         )
 
         if provider is None:
-            raise ValueError(f"you have not added provider {provider}")
+            raise ValueError(f"you have not added provider {provider_name}")
 
         controller = ToolTransformService.api_provider_to_controller(db_provider=provider)
         labels = ToolLabelManager.get_tool_labels(controller)
@@ -246,8 +248,10 @@ class ApiToolManageService:
         if schema_type not in [member.value for member in ApiProviderSchemaType]:
             raise ValueError(f"invalid schema type {schema}")
 
+        provider_name = provider_name.strip()
+
         # check if the provider exists
-        provider: ApiToolProvider = (
+        provider = (
             db.session.query(ApiToolProvider)
             .filter(
                 ApiToolProvider.tenant_id == tenant_id,
@@ -314,7 +318,7 @@ class ApiToolManageService:
         """
         delete tool provider
         """
-        provider: ApiToolProvider = (
+        provider = (
             db.session.query(ApiToolProvider)
             .filter(
                 ApiToolProvider.tenant_id == tenant_id,
@@ -364,7 +368,7 @@ class ApiToolManageService:
         if tool_bundle is None:
             raise ValueError(f"invalid tool name {tool_name}")
 
-        db_provider: ApiToolProvider = (
+        db_provider = (
             db.session.query(ApiToolProvider)
             .filter(
                 ApiToolProvider.tenant_id == tenant_id,
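Besides swapping the misleading `provider: ApiToolProvider` annotations for plain assignments (`.first()` can return None, so the annotation overstated what the query guarantees), the service now strips the provider name before the create and update lookups, so a name saved with stray whitespace stays findable. Illustrated with an in-memory stand-in for the table:

providers = {}  # stand-in for the ApiToolProvider table

def create_provider(name: str):
    name = name.strip()  # normalise before persisting
    providers[name] = {"name": name}

def get_provider(name: str):
    provider = providers.get(name.strip())  # may be None, like Query.first()
    if provider is None:
        raise ValueError(f"you have not added provider {name.strip()}")
    return provider

create_provider("my provider ")
print(get_provider("my provider"))  # found despite the trailing space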
@@ -5,6 +5,7 @@ import click
 from celery import shared_task
 
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
+from core.tools.utils.web_reader_tool import get_image_upload_file_ids
 from extensions.ext_database import db
 from extensions.ext_storage import storage
 from models.dataset import (
@@ -67,6 +68,16 @@ def clean_dataset_task(
                 db.session.delete(document)
 
             for segment in segments:
+                image_upload_file_ids = get_image_upload_file_ids(segment.content)
+                for upload_file_id in image_upload_file_ids:
+                    image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
+                    try:
+                        storage.delete(image_file.key)
+                    except Exception:
+                        logging.exception(
+                            "Delete image_files failed when storage deleted, \
+                image_upload_file_is: {}".format(upload_file_id)
+                        )
                 db.session.delete(segment)
 
             db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()
@@ -6,6 +6,7 @@ import click
 from celery import shared_task
 
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
+from core.tools.utils.web_reader_tool import get_image_upload_file_ids
 from extensions.ext_database import db
 from extensions.ext_storage import storage
 from models.dataset import Dataset, DocumentSegment
@@ -40,6 +41,16 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
         index_processor.clean(dataset, index_node_ids)
 
         for segment in segments:
+            image_upload_file_ids = get_image_upload_file_ids(segment.content)
+            for upload_file_id in image_upload_file_ids:
+                image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
+                try:
+                    storage.delete(image_file.key)
+                except Exception:
+                    logging.exception(
+                        "Delete image_files failed when storage deleted, \
+                image_upload_file_is: {}".format(upload_file_id)
+                    )
             db.session.delete(segment)
 
         db.session.commit()
@@ -2,7 +2,7 @@ version: '3'
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.11.0
+    image: langgenius/dify-api:0.11.1
     restart: always
     environment:
       # Startup mode, 'api' starts the API server.
@@ -227,7 +227,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.11.0
+    image: langgenius/dify-api:0.11.1
     restart: always
     environment:
       CONSOLE_WEB_URL: ''
@@ -397,7 +397,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.11.0
+    image: langgenius/dify-web:0.11.1
     restart: always
     environment:
       # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
@@ -280,7 +280,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.11.0
+    image: langgenius/dify-api:0.11.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -300,7 +300,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.11.0
+    image: langgenius/dify-api:0.11.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -319,7 +319,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.11.0
+    image: langgenius/dify-web:0.11.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -261,6 +261,10 @@ const SettingsModal: FC<ISettingsModalProps> = ({
               onChange={onChange('chatColorTheme')}
               placeholder='E.g #A020F0'
             />
+            <div className="mt-1 flex justify-between items-center">
+              <p className={`ml-2 ${s.settingsTip} text-gray-500`}>{t(`${prefixSettings}.chatColorThemeInverted`)}</p>
+              <Switch defaultValue={inputInfo.chatColorThemeInverted} onChange={v => setInputInfo({ ...inputInfo, chatColorThemeInverted: v })}></Switch>
+            </div>
           </>}
           {systemFeatures.enable_web_sso_switch_component && <div className='w-full mt-8'>
             <p className='system-xs-medium text-gray-500'>{t(`${prefixSettings}.sso.label`)}</p>
@@ -94,6 +94,7 @@ const TextGeneration: FC<IMainProps> = ({
   const [isCallBatchAPI, setIsCallBatchAPI] = useState(false)
   const isInBatchTab = currentTab === 'batch'
   const [inputs, setInputs] = useState<Record<string, any>>({})
+  const inputsRef = useRef(inputs)
   const [appId, setAppId] = useState<string>('')
   const [siteInfo, setSiteInfo] = useState<SiteInfo | null>(null)
   const [canReplaceLogo, setCanReplaceLogo] = useState<boolean>(false)
@@ -604,6 +605,7 @@ const TextGeneration: FC<IMainProps> = ({
             <RunOnce
               siteInfo={siteInfo}
               inputs={inputs}
+              inputsRef={inputsRef}
               onInputsChange={setInputs}
               promptConfig={promptConfig}
               onSend={handleSend}
@@ -1,5 +1,5 @@
 import type { FC, FormEvent } from 'react'
-import React from 'react'
+import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
 import {
   PlayIcon,
@@ -19,6 +19,7 @@ export type IRunOnceProps = {
   siteInfo: SiteInfo
   promptConfig: PromptConfig
   inputs: Record<string, any>
+  inputsRef: React.MutableRefObject<Record<string, any>>
   onInputsChange: (inputs: Record<string, any>) => void
   onSend: () => void
   visionConfig: VisionSettings
@@ -27,6 +28,7 @@ export type IRunOnceProps = {
 const RunOnce: FC<IRunOnceProps> = ({
   promptConfig,
   inputs,
+  inputsRef,
   onInputsChange,
   onSend,
   visionConfig,
@@ -47,6 +49,11 @@ const RunOnce: FC<IRunOnceProps> = ({
     onSend()
   }
 
+  const handleInputsChange = useCallback((newInputs: Record<string, any>) => {
+    onInputsChange(newInputs)
+    inputsRef.current = newInputs
+  }, [onInputsChange, inputsRef])
+
   return (
     <div className="">
       <section>
@@ -60,7 +67,7 @@ const RunOnce: FC<IRunOnceProps> = ({
             <Select
               className='w-full'
              defaultValue={inputs[item.key]}
-              onSelect={(i) => { onInputsChange({ ...inputs, [item.key]: i.value }) }}
+              onSelect={(i) => { handleInputsChange({ ...inputsRef.current, [item.key]: i.value }) }}
               items={(item.options || []).map(i => ({ name: i, value: i }))}
               allowSearch={false}
               bgClassName='bg-gray-50'
@@ -72,7 +79,7 @@ const RunOnce: FC<IRunOnceProps> = ({
               className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
               placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
               value={inputs[item.key]}
-              onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
+              onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
               maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
             />
           )}
@@ -81,7 +88,7 @@ const RunOnce: FC<IRunOnceProps> = ({
               className='h-[104px] sm:text-xs'
               placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
               value={inputs[item.key]}
-              onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
+              onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
             />
           )}
           {item.type === 'number' && (
@@ -90,12 +97,12 @@ const RunOnce: FC<IRunOnceProps> = ({
               className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
               placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
               value={inputs[item.key]}
-              onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
+              onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
             />
           )}
           {item.type === 'file' && (
             <FileUploaderInAttachmentWrapper
-              onChange={(files) => { onInputsChange({ ...inputs, [item.key]: getProcessedFiles(files)[0] }) }}
+              onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files)[0] }) }}
               fileConfig={{
                 ...item.config,
                 fileUploadConfig: (visionConfig as any).fileUploadConfig,
@@ -104,7 +111,7 @@ const RunOnce: FC<IRunOnceProps> = ({
           )}
           {item.type === 'file-list' && (
             <FileUploaderInAttachmentWrapper
-              onChange={(files) => { onInputsChange({ ...inputs, [item.key]: getProcessedFiles(files) }) }}
+              onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files) }) }}
               fileConfig={{
                 ...item.config,
                 fileUploadConfig: (visionConfig as any).fileUploadConfig,
@@ -1,6 +1,6 @@
 {
   "name": "dify-web",
-  "version": "0.11.0",
+  "version": "0.11.1",
   "private": true,
   "engines": {
     "node": ">=18.17.0"