Mirror of https://github.com/langgenius/dify.git (synced 2024-11-16 11:42:29 +08:00)

Compare commits: c15c36cb60 ... 9d5ae15cf4
10 commits:

- 9d5ae15cf4
- e1a7aada70
- 2eedf70b5a
- bfa9c4956e
- 0584c46cd1
- df507f2808
- 505a2a93fb
- 0cc110454f
- 9f8d871ed2
- 17d92c2c02
`.github/actions/setup-poetry/action.yml` (vendored, 2 changes)

```diff
@@ -4,7 +4,7 @@ inputs:
   python-version:
     description: Python version to use and the Poetry installed with
     required: true
-    default: '3.10'
+    default: '3.11'
   poetry-version:
     description: Poetry version to set up
     required: true
```
`.github/workflows/api-tests.yml` (vendored, 1 change)

```diff
@@ -20,7 +20,6 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - "3.10"
          - "3.11"
          - "3.12"
 
```
`.github/workflows/vdb-tests.yml` (vendored, 1 change)

```diff
@@ -20,7 +20,6 @@ jobs:
     strategy:
      matrix:
        python-version:
-          - "3.10"
          - "3.11"
          - "3.12"
 
```
````diff
@@ -2,9 +2,6 @@
 
 ## Usage
 
-> [!IMPORTANT]
-> In the v0.6.12 release, we deprecated `pip` as the package management tool for Dify API Backend service and replaced it with `poetry`.
-
 1. Start the docker-compose stack
 
    The backend require some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`.
@@ -30,26 +27,24 @@
    SECRET_KEY=${secret_key}" .env
    ```
 
-4. Create environment.
+4. Prepare Python environment
 
-   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.
+   Dify API services requires Python 3.11 or 3.12, and the [Poetry](https://python-poetry.org/docs/) for dependency management.
+   - To install Poetry, please refer to the [Poetry's installation guide](https://python-poetry.org/docs/#installation). The simplest way is to run the `pip install poetry` command to install Poetry on pip.
+   - Run `poetry env use 3.12` to switch to the Python version for Poetry, please refer the usage of `poetry env use` command in [Poetry docs](https://python-poetry.org/docs/managing-environments/#switching-between-environments).
+   - Run `poetry shell` to activate the shell environment with Poetry support.
 
 5. Install dependencies
 
    ```bash
-   poetry env use 3.10
+   cd api
+   poetry env use 3.12
    poetry install
    ```
 
-   In case of contributors missing to update dependencies for `pyproject.toml`, you can perform the following shell instead.
-
-   ```bash
-   poetry shell # activate current environment
-   poetry add $(cat requirements.txt) # install dependencies of production and update pyproject.toml
-   poetry add $(cat requirements-dev.txt) --group dev # install dependencies of development and update pyproject.toml
-   ```
-
-6. Run migrate
+6. Run db migration
 
    Before the first launch, migrate the database to the latest version.
 
@@ -57,15 +52,18 @@
    poetry run python -m flask db upgrade
    ```
 
-7. Start backend
+7. Start api service
 
    ```bash
-   poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug
+   poetry run python -m flask run --host 0.0.0.0 --port=5001
   ```
 
 8. Start Dify [web](../web) service.
 9. Setup your application by visiting `http://localhost:3000`...
-10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
+10. Start the worker service, if you need to handle and debug the async tasks (e.g. dataset importing and documents
+    indexing), please start the worker service.
 
    ```bash
    poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
````
```diff
@@ -1,6 +1,11 @@
 import os
 import sys
 
+python_version = sys.version_info
+if not ((3, 11) <= python_version < (3, 13)):
+    print(f"Python 3.11 or 3.12 is required, current version is {python_version.major}.{python_version.minor}")
+    raise SystemExit(1)
+
 from configs import dify_config
 
 if not dify_config.DEBUG:
@@ -30,9 +35,6 @@ from models import account, dataset, model, source, task, tool, tools, web  # noqa
 
 # DO NOT REMOVE ABOVE
 
-if sys.version_info[:2] == (3, 10):
-    print("Warning: Python 3.10 will not be supported in the next version.")
-
 warnings.simplefilter("ignore", ResourceWarning)
```
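The new guard in `app.py` above works because `sys.version_info` compares like a tuple. As a quick illustration (a sketch of mine, not code from the commit):

```python
import sys

# sys.version_info behaves like a tuple, e.g. (3, 12, 1, "final", 0).
# Tuple comparison is element-wise, so this accepts any 3.11.x or 3.12.x
# interpreter and rejects both 3.10 and 3.13.
if not ((3, 11) <= sys.version_info < (3, 13)):
    raise SystemExit(
        f"Python 3.11 or 3.12 is required, got {sys.version_info.major}.{sys.version_info.minor}"
    )

print("interpreter version OK")
```

This mirrors the CI and README changes above: 3.10 is dropped everywhere, and the runtime now fails fast instead of merely printing the old deprecation warning.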
```diff
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import pytz
 from flask_login import current_user
@@ -314,7 +314,7 @@ def _get_conversation(app_model, conversation_id):
        raise NotFound("Conversation Not Exists.")
 
    if not conversation.read_at:
-        conversation.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
        conversation.read_account_id = current_user.id
        db.session.commit()
```
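This and the following hunks all make the same substitution: `datetime.UTC` was added in Python 3.11 as an alias of `datetime.timezone.utc`, which is exactly why this cleanup can land only after the 3.10 drop above. A small sketch of the semantics (mine, not from the commit); the `.replace(tzinfo=None)` tail keeps the codebase's convention of storing naive datetimes that are implicitly UTC:

```python
from datetime import UTC, datetime, timezone

# On 3.11+ the two spellings name the same object, so the change is purely cosmetic.
assert UTC is timezone.utc

aware_now = datetime.now(UTC)                   # timezone-aware, +00:00
naive_utc_now = aware_now.replace(tzinfo=None)  # same wall-clock time, tzinfo stripped

assert naive_utc_now.tzinfo is None
print(naive_utc_now)
```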
```diff
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import Resource, marshal_with, reqparse
@@ -75,7 +75,7 @@ class AppSite(Resource):
            setattr(site, attr_name, value)
 
        site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()
 
        return site
@@ -99,7 +99,7 @@ class AppSiteAccessTokenReset(Resource):
 
        site.code = Site.generate_code(16)
        site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()
 
        return site
```
```diff
@@ -65,7 +65,7 @@ class ActivateApi(Resource):
        account.timezone = args["timezone"]
        account.interface_theme = "light"
        account.status = AccountStatus.ACTIVE.value
-        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        db.session.commit()
 
        token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
```
```diff
@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional
 
 import requests
@@ -106,7 +106,7 @@ class OAuthCallback(Resource):
 
        if account.status == AccountStatus.PENDING.value:
            account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
        try:
```
```diff
@@ -83,7 +83,7 @@ class DataSourceApi(Resource):
        if action == "enable":
            if data_source_binding.disabled:
                data_source_binding.disabled = False
-                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.add(data_source_binding)
                db.session.commit()
            else:
@@ -92,7 +92,7 @@ class DataSourceApi(Resource):
        if action == "disable":
            if not data_source_binding.disabled:
                data_source_binding.disabled = True
-                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.add(data_source_binding)
                db.session.commit()
            else:
```
```diff
@@ -1,6 +1,6 @@
 import logging
 from argparse import ArgumentTypeError
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask import request
 from flask_login import current_user
@@ -665,7 +665,7 @@ class DocumentProcessingApi(DocumentResource):
            raise InvalidActionError("Document not in indexing state.")
 
        document.paused_by = current_user.id
-        document.paused_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        document.paused_at = datetime.now(UTC).replace(tzinfo=None)
        document.is_paused = True
        db.session.commit()
 
@@ -745,7 +745,7 @@ class DocumentMetadataApi(DocumentResource):
                document.doc_metadata[key] = value
 
        document.doc_type = doc_type
-        document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        document.updated_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()
 
        return {"result": "success", "message": "Document metadata updated."}, 200
@@ -787,7 +787,7 @@ class DocumentStatusApi(DocumentResource):
            document.enabled = True
            document.disabled_at = None
            document.disabled_by = None
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
            # Set cache to prevent indexing the same document multiple times
@@ -804,9 +804,9 @@ class DocumentStatusApi(DocumentResource):
                raise InvalidActionError("Document already disabled.")
 
            document.enabled = False
-            document.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
            document.disabled_by = current_user.id
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
            # Set cache to prevent indexing the same document multiple times
@@ -821,9 +821,9 @@ class DocumentStatusApi(DocumentResource):
                raise InvalidActionError("Document already archived.")
 
            document.archived = True
-            document.archived_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.archived_at = datetime.now(UTC).replace(tzinfo=None)
            document.archived_by = current_user.id
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
            if document.enabled:
@@ -840,7 +840,7 @@ class DocumentStatusApi(DocumentResource):
            document.archived = False
            document.archived_at = None
            document.archived_by = None
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
            # Set cache to prevent indexing the same document multiple times
```
```diff
@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import pandas as pd
 from flask import request
@@ -188,7 +188,7 @@ class DatasetDocumentSegmentApi(Resource):
                raise InvalidActionError("Segment is already disabled.")
 
            segment.enabled = False
-            segment.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            segment.disabled_at = datetime.now(UTC).replace(tzinfo=None)
            segment.disabled_by = current_user.id
            db.session.commit()
```
```diff
@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import reqparse
@@ -46,7 +46,7 @@ class CompletionApi(InstalledAppResource):
        streaming = args["response_mode"] == "streaming"
        args["auto_generate_name"] = False
 
-        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()
 
        try:
@@ -106,7 +106,7 @@ class ChatApi(InstalledAppResource):
 
        args["auto_generate_name"] = False
 
-        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()
 
        try:
```
```diff
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import Resource, inputs, marshal_with, reqparse
@@ -81,7 +81,7 @@ class InstalledAppsListApi(Resource):
                tenant_id=current_tenant_id,
                app_owner_tenant_id=app.tenant_id,
                is_pinned=False,
-                last_used_at=datetime.now(timezone.utc).replace(tzinfo=None),
+                last_used_at=datetime.now(UTC).replace(tzinfo=None),
            )
            db.session.add(new_installed_app)
            db.session.commit()
```
```diff
@@ -60,7 +60,7 @@ class AccountInitApi(Resource):
                raise InvalidInvitationCodeError()
 
            invitation_code.status = "used"
-            invitation_code.used_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            invitation_code.used_by_tenant_id = account.current_tenant_id
            invitation_code.used_by_account_id = account.id
 
@@ -68,7 +68,7 @@ class AccountInitApi(Resource):
        account.timezone = args["timezone"]
        account.interface_theme = "light"
        account.status = "active"
-        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        db.session.commit()
 
        return {"result": "success"}
```
```diff
@@ -1,5 +1,5 @@
 from collections.abc import Callable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from functools import wraps
 from typing import Optional
@@ -198,7 +198,7 @@ def validate_and_get_api_token(scope=None):
    if not api_token:
        raise Unauthorized("Access token is invalid")
 
-    api_token.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+    api_token.last_used_at = datetime.now(UTC).replace(tzinfo=None)
    db.session.commit()
 
    return api_token
```
```diff
@@ -2,7 +2,7 @@ import json
 import logging
 import uuid
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union, cast
 
 from core.agent.entities import AgentEntity, AgentToolEntity
@@ -419,7 +419,7 @@ class BaseAgentRunner(AppRunner):
            .first()
        )
 
-        db_variables.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        db_variables.updated_at = datetime.now(UTC).replace(tzinfo=None)
        db_variables.variables_str = json.dumps(jsonable_encoder(tool_variables.pool))
        db.session.commit()
        db.session.close()
```
```diff
@@ -1,5 +1,5 @@
 from collections.abc import Sequence
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, Field, field_validator
@@ -88,7 +88,7 @@ class PromptTemplateEntity(BaseModel):
    advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None
 
 
-class VariableEntityType(str, Enum):
+class VariableEntityType(StrEnum):
    TEXT_INPUT = "text-input"
    SELECT = "select"
    PARAGRAPH = "paragraph"
```
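Here the second mechanical change of the series starts: `class X(str, Enum)` becomes `class X(StrEnum)`. `enum.StrEnum` also arrived in Python 3.11, so it too depends on the version bump. Members of both variants are real strings, but their `str()` output differs, which is the main thing to watch when migrating; a minimal sketch of mine (runnable on 3.11+, not from the commit):

```python
from enum import Enum, StrEnum


class OldStyle(str, Enum):  # the pattern being replaced
    SELECT = "select"


class NewStyle(StrEnum):  # the replacement
    SELECT = "select"


# Both kinds of member compare equal to the raw string:
assert OldStyle.SELECT == "select"
assert NewStyle.SELECT == "select"

# But string conversion differs: the str/Enum mixin keeps the default
# Enum-style __str__, while StrEnum renders the plain value.
assert str(OldStyle.SELECT) == "OldStyle.SELECT"
assert str(NewStyle.SELECT) == "select"
assert f"{NewStyle.SELECT}" == "select"
```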
```diff
@@ -1,7 +1,7 @@
 import json
 import logging
 from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union
 
 from sqlalchemy import and_
@@ -200,7 +200,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
            db.session.commit()
            db.session.refresh(conversation)
        else:
-            conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
            db.session.commit()
 
        message = Message(
```
```diff
@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, field_validator
@@ -11,7 +11,7 @@ from core.workflow.nodes import NodeType
 from core.workflow.nodes.base import BaseNodeData
 
 
-class QueueEvent(str, Enum):
+class QueueEvent(StrEnum):
    """
    QueueEvent enum
    """
```
```diff
@@ -1,7 +1,7 @@
 import json
 import time
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Optional, Union, cast
 
 from sqlalchemy.orm import Session
@@ -144,7 +144,7 @@ class WorkflowCycleManage:
        workflow_run.elapsed_time = time.perf_counter() - start_at
        workflow_run.total_tokens = total_tokens
        workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
        db.session.commit()
        db.session.refresh(workflow_run)
@@ -191,7 +191,7 @@ class WorkflowCycleManage:
        workflow_run.elapsed_time = time.perf_counter() - start_at
        workflow_run.total_tokens = total_tokens
        workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
        db.session.commit()
 
@@ -211,7 +211,7 @@ class WorkflowCycleManage:
        for workflow_node_execution in running_workflow_node_executions:
            workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
            workflow_node_execution.error = error
-            workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
            workflow_node_execution.elapsed_time = (
                workflow_node_execution.finished_at - workflow_node_execution.created_at
            ).total_seconds()
@@ -259,7 +259,7 @@ class WorkflowCycleManage:
                NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
            }
        )
-        workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
 
        session.add(workflow_node_execution)
        session.commit()
@@ -282,7 +282,7 @@ class WorkflowCycleManage:
        execution_metadata = (
            json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
        )
-        finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        finished_at = datetime.now(UTC).replace(tzinfo=None)
        elapsed_time = (finished_at - event.start_at).total_seconds()
 
        db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
@@ -326,7 +326,7 @@ class WorkflowCycleManage:
        inputs = WorkflowEntry.handle_special_values(event.inputs)
        process_data = WorkflowEntry.handle_special_values(event.process_data)
        outputs = WorkflowEntry.handle_special_values(event.outputs)
-        finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        finished_at = datetime.now(UTC).replace(tzinfo=None)
        elapsed_time = (finished_at - event.start_at).total_seconds()
        execution_metadata = (
            json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
@@ -654,7 +654,7 @@ class WorkflowCycleManage:
            if event.error is None
            else WorkflowNodeExecutionStatus.FAILED,
            error=None,
-            elapsed_time=(datetime.now(timezone.utc).replace(tzinfo=None) - event.start_at).total_seconds(),
+            elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
            total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
            execution_metadata=event.metadata,
            finished_at=int(time.time()),
```
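Worth noting in the hunks above: this class measures time two different ways. `workflow_run.elapsed_time` comes from a `time.perf_counter()` delta, while node executions subtract two naive-UTC datetimes. Both work, but the datetime version only stays correct because every timestamp involved is naive UTC; subtracting a naive from an aware datetime raises `TypeError`. A small illustration (my sketch, not repo code):

```python
import time
from datetime import UTC, datetime

# Style 1: monotonic clock delta (immune to wall-clock adjustments).
start_at = time.perf_counter()
time.sleep(0.01)
elapsed = time.perf_counter() - start_at  # float seconds

# Style 2: subtract two naive-UTC datetimes, as the node-execution code does.
created_at = datetime.now(UTC).replace(tzinfo=None)
time.sleep(0.01)
finished_at = datetime.now(UTC).replace(tzinfo=None)
elapsed_seconds = (finished_at - created_at).total_seconds()

print(f"{elapsed:.3f}s via perf_counter, {elapsed_seconds:.3f}s via datetimes")
```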
```diff
@@ -240,7 +240,7 @@ class ProviderConfiguration(BaseModel):
        if provider_record:
            provider_record.encrypted_config = json.dumps(credentials)
            provider_record.is_valid = True
-            provider_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            provider_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            provider_record = Provider(
@@ -394,7 +394,7 @@ class ProviderConfiguration(BaseModel):
        if provider_model_record:
            provider_model_record.encrypted_config = json.dumps(credentials)
            provider_model_record.is_valid = True
-            provider_model_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            provider_model_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            provider_model_record = ProviderModel(
@@ -468,7 +468,7 @@ class ProviderConfiguration(BaseModel):
 
        if model_setting:
            model_setting.enabled = True
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            model_setting = ProviderModelSetting(
@@ -503,7 +503,7 @@ class ProviderConfiguration(BaseModel):
 
        if model_setting:
            model_setting.enabled = False
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            model_setting = ProviderModelSetting(
@@ -570,7 +570,7 @@ class ProviderConfiguration(BaseModel):
 
        if model_setting:
            model_setting.load_balancing_enabled = True
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            model_setting = ProviderModelSetting(
@@ -605,7 +605,7 @@ class ProviderConfiguration(BaseModel):
 
        if model_setting:
            model_setting.load_balancing_enabled = False
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        else:
            model_setting = ProviderModelSetting(
```
```diff
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class FileType(str, Enum):
+class FileType(StrEnum):
    IMAGE = "image"
    DOCUMENT = "document"
    AUDIO = "audio"
@@ -16,7 +16,7 @@ class FileType(str, Enum):
        raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileTransferMethod(str, Enum):
+class FileTransferMethod(StrEnum):
    REMOTE_URL = "remote_url"
    LOCAL_FILE = "local_file"
    TOOL_FILE = "tool_file"
@@ -29,7 +29,7 @@ class FileTransferMethod(str, Enum):
        raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileBelongsTo(str, Enum):
+class FileBelongsTo(StrEnum):
    USER = "user"
    ASSISTANT = "assistant"
 
@@ -41,7 +41,7 @@ class FileBelongsTo(str, Enum):
        raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileAttribute(str, Enum):
+class FileAttribute(StrEnum):
    TYPE = "type"
    SIZE = "size"
    NAME = "name"
@@ -51,5 +51,5 @@ class FileAttribute(str, Enum):
    EXTENSION = "extension"
 
 
-class ArrayFileAttribute(str, Enum):
+class ArrayFileAttribute(StrEnum):
    LENGTH = "length"
```
```diff
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Mapping
-from enum import Enum
+from enum import StrEnum
 from threading import Lock
 from typing import Any, Optional
 
@@ -31,7 +31,7 @@ class CodeExecutionResponse(BaseModel):
    data: Data
 
 
-class CodeLanguage(str, Enum):
+class CodeLanguage(StrEnum):
    PYTHON3 = "python3"
    JINJA2 = "jinja2"
    JAVASCRIPT = "javascript"
```
```diff
@@ -84,7 +84,7 @@ class IndexingRunner:
            except ProviderTokenNotInitError as e:
                dataset_document.indexing_status = "error"
                dataset_document.error = str(e.description)
-                dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.commit()
            except ObjectDeletedError:
                logging.warning("Document deleted, document id: {}".format(dataset_document.id))
@@ -92,7 +92,7 @@ class IndexingRunner:
                logging.exception("consume document failed")
                dataset_document.indexing_status = "error"
                dataset_document.error = str(e)
-                dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.commit()
 
    def run_in_splitting_status(self, dataset_document: DatasetDocument):
@@ -140,13 +140,13 @@ class IndexingRunner:
        except ProviderTokenNotInitError as e:
            dataset_document.indexing_status = "error"
            dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        except Exception as e:
            logging.exception("consume document failed")
            dataset_document.indexing_status = "error"
            dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
 
    def run_in_indexing_status(self, dataset_document: DatasetDocument):
@@ -198,13 +198,13 @@ class IndexingRunner:
        except ProviderTokenNotInitError as e:
            dataset_document.indexing_status = "error"
            dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
        except Exception as e:
            logging.exception("consume document failed")
            dataset_document.indexing_status = "error"
            dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            db.session.commit()
 
    def indexing_estimate(
@@ -357,7 +357,7 @@ class IndexingRunner:
            after_indexing_status="splitting",
            extra_update_params={
                DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs),
-                DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            },
        )
 
@@ -449,7 +449,7 @@ class IndexingRunner:
        doc_store.add_documents(documents)
 
        # update document status to indexing
-        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        self._update_document_index_status(
            document_id=dataset_document.id,
            after_indexing_status="indexing",
@@ -464,7 +464,7 @@ class IndexingRunner:
            dataset_document_id=dataset_document.id,
            update_params={
                DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            },
        )
 
@@ -669,7 +669,7 @@ class IndexingRunner:
            after_indexing_status="completed",
            extra_update_params={
                DatasetDocument.tokens: tokens,
-                DatasetDocument.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DatasetDocument.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                DatasetDocument.indexing_latency: indexing_end_at - indexing_start_at,
                DatasetDocument.error: None,
            },
@@ -694,7 +694,7 @@ class IndexingRunner:
                {
                    DocumentSegment.status: "completed",
                    DocumentSegment.enabled: True,
-                    DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                }
            )
 
@@ -727,7 +727,7 @@ class IndexingRunner:
                {
                    DocumentSegment.status: "completed",
                    DocumentSegment.enabled: True,
-                    DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                }
            )
 
@@ -838,7 +838,7 @@ class IndexingRunner:
        doc_store.add_documents(documents)
 
        # update document status to indexing
-        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        self._update_document_index_status(
            document_id=dataset_document.id,
            after_indexing_status="indexing",
@@ -853,7 +853,7 @@ class IndexingRunner:
            dataset_document_id=dataset_document.id,
            update_params={
                DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            },
        )
        pass
```
```diff
@@ -1,5 +1,5 @@
 from abc import ABC
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Optional
 
 from pydantic import BaseModel, Field, field_validator
@@ -93,7 +93,7 @@ class ImagePromptMessageContent(PromptMessageContent):
    Model class for image prompt message content.
    """
 
-    class DETAIL(str, Enum):
+    class DETAIL(StrEnum):
        LOW = "low"
        HIGH = "high"
```
```diff
@@ -1,5 +1,5 @@
 from decimal import Decimal
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, ConfigDict
@@ -89,7 +89,7 @@ class ModelFeature(Enum):
    STREAM_TOOL_CALL = "stream-tool-call"
 
 
-class DefaultParameterName(str, Enum):
+class DefaultParameterName(StrEnum):
    """
    Enum class for parameter template variable.
    """
```
```diff
@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, ConfigDict, field_validator
@@ -122,7 +122,7 @@ trace_info_info_map = {
 }
 
 
-class TraceTaskName(str, Enum):
+class TraceTaskName(StrEnum):
    CONVERSATION_TRACE = "conversation"
    WORKFLOW_TRACE = "workflow"
    MESSAGE_TRACE = "message"
```
```diff
@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, Field, field_validator
@@ -39,7 +39,7 @@ def validate_input_output(v, field_name):
    return v
 
 
-class LevelEnum(str, Enum):
+class LevelEnum(StrEnum):
    DEBUG = "DEBUG"
    WARNING = "WARNING"
    ERROR = "ERROR"
@@ -178,7 +178,7 @@ class LangfuseSpan(BaseModel):
        return validate_input_output(v, field_name)
 
 
-class UnitEnum(str, Enum):
+class UnitEnum(StrEnum):
    CHARACTERS = "CHARACTERS"
    TOKENS = "TOKENS"
    SECONDS = "SECONDS"
```
```diff
@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, Field, field_validator
@@ -8,7 +8,7 @@ from pydantic_core.core_schema import ValidationInfo
 from core.ops.utils import replace_text_with_content
 
 
-class LangSmithRunType(str, Enum):
+class LangSmithRunType(StrEnum):
    tool = "tool"
    chain = "chain"
    llm = "llm"
```
```diff
@@ -23,7 +23,7 @@ if TYPE_CHECKING:
    from core.file.models import File
 
 
-class ModelMode(str, enum.Enum):
+class ModelMode(enum.StrEnum):
    COMPLETION = "completion"
    CHAT = "chat"
```
```diff
@@ -1,5 +1,5 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class KeyWordType(str, Enum):
+class KeyWordType(StrEnum):
    JIEBA = "jieba"
```
```diff
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class VectorType(str, Enum):
+class VectorType(StrEnum):
    ANALYTICDB = "analyticdb"
    CHROMA = "chroma"
    MILVUS = "milvus"
```
```diff
@@ -114,10 +114,10 @@ class WordExtractor(BaseExtractor):
            mime_type=mime_type or "",
            created_by=self.user_id,
            created_by_role=CreatedByRole.ACCOUNT,
-            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            used=True,
            used_by=self.user_id,
-            used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
        )
 
        db.session.add(upload_file)
```
```diff
@@ -1,6 +1,6 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class RerankMode(str, Enum):
+class RerankMode(StrEnum):
    RERANKING_MODEL = "reranking_model"
    WEIGHTED_SCORE = "weighted_score"
```
```diff
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional, Union, cast
 
 from pydantic import BaseModel, Field, field_validator
@@ -137,7 +137,7 @@ class ToolParameterOption(BaseModel):
 
 
 class ToolParameter(BaseModel):
-    class ToolParameterType(str, Enum):
+    class ToolParameterType(StrEnum):
        STRING = "string"
        NUMBER = "number"
        BOOLEAN = "boolean"
```
```diff
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Union
 
 from pytz import timezone as pytz_timezone
@@ -20,7 +20,7 @@ class CurrentTimeTool(BuiltinTool):
        tz = tool_parameters.get("timezone", "UTC")
        fm = tool_parameters.get("format") or "%Y-%m-%d %H:%M:%S %Z"
        if tz == "UTC":
-            return self.create_text_message(f"{datetime.now(timezone.utc).strftime(fm)}")
+            return self.create_text_message(f"{datetime.now(UTC).strftime(fm)}")
 
        try:
            tz = pytz_timezone(tz)
```
```diff
@@ -1,7 +1,7 @@
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
 from copy import deepcopy
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import TYPE_CHECKING, Any, Optional, Union
 
 from pydantic import BaseModel, ConfigDict, field_validator
@@ -62,7 +62,7 @@ class Tool(BaseModel, ABC):
    def __init__(self, **data: Any):
        super().__init__(**data)
 
-    class VariableKey(str, Enum):
+    class VariableKey(StrEnum):
        IMAGE = "image"
        DOCUMENT = "document"
        VIDEO = "video"
```
```diff
@@ -1,7 +1,7 @@
 import json
 from collections.abc import Mapping
 from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from mimetypes import guess_type
 from typing import Any, Optional, Union
 
@@ -158,7 +158,7 @@ class ToolEngine:
        """
        Invoke the tool with the given arguments.
        """
-        started_at = datetime.now(timezone.utc)
+        started_at = datetime.now(UTC)
        meta = ToolInvokeMeta(
            time_cost=0.0,
            error=None,
@@ -176,7 +176,7 @@ class ToolEngine:
            meta.error = str(e)
            raise ToolEngineInvokeError(meta)
        finally:
-            ended_at = datetime.now(timezone.utc)
+            ended_at = datetime.now(UTC)
            meta.time_cost = (ended_at - started_at).total_seconds()
 
        return meta, response
```
```diff
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class SegmentType(str, Enum):
+class SegmentType(StrEnum):
    NONE = "none"
    NUMBER = "number"
    STRING = "string"
```
```diff
@@ -1,5 +1,5 @@
 from collections.abc import Mapping
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel
@@ -8,7 +8,7 @@ from core.model_runtime.entities.llm_entities import LLMUsage
 from models.workflow import WorkflowNodeExecutionStatus
 
 
-class NodeRunMetadataKey(str, Enum):
+class NodeRunMetadataKey(StrEnum):
    """
    Node Run Metadata Key.
    """
```
```diff
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class SystemVariableKey(str, Enum):
+class SystemVariableKey(StrEnum):
    """
    System Variables.
    """
```
```diff
@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Optional
 
@@ -63,7 +63,7 @@ class RouteNodeState(BaseModel):
            raise Exception(f"Invalid route status {run_result.status}")
 
        self.node_run_result = run_result
-        self.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        self.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
 
 class RuntimeRouteState(BaseModel):
@@ -81,7 +81,7 @@ class RuntimeRouteState(BaseModel):
 
        :param node_id: node id
        """
-        state = RouteNodeState(node_id=node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+        state = RouteNodeState(node_id=node_id, start_at=datetime.now(UTC).replace(tzinfo=None))
        self.node_state_mapping[state.id] = state
        return state
```
|
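The recurring `datetime.now(UTC).replace(tzinfo=None)` idiom builds an aware UTC timestamp and then strips the tzinfo, because these values land in naive (timezone-less) DateTime columns. A sketch under that assumption; the helper name is not from the codebase:

```python
from datetime import UTC, datetime

def naive_utc_now() -> datetime:
    # Take aware UTC time, then drop tzinfo for storage in a naive column.
    return datetime.now(UTC).replace(tzinfo=None)

now = naive_utc_now()
assert now.tzinfo is None  # naive, but the wall-clock value is UTC
```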
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum


-class NodeType(str, Enum):
+class NodeType(StrEnum):
     START = "start"
     END = "end"
     ANSWER = "answer"

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional

 from pydantic import Field
@@ -6,7 +6,7 @@ from pydantic import Field
 from core.workflow.nodes.base import BaseIterationNodeData, BaseIterationState, BaseNodeData


-class ErrorHandleMode(str, Enum):
+class ErrorHandleMode(StrEnum):
     TERMINATED = "terminated"
     CONTINUE_ON_ERROR = "continue-on-error"
     REMOVE_ABNORMAL_OUTPUT = "remove-abnormal-output"

@@ -2,7 +2,7 @@ import logging
 import uuid
 from collections.abc import Generator, Mapping, Sequence
 from concurrent.futures import Future, wait
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from queue import Empty, Queue
 from typing import TYPE_CHECKING, Any, Optional, cast

@@ -135,7 +135,7 @@ class IterationNode(BaseNode[IterationNodeData]):
             thread_pool_id=self.thread_pool_id,
         )

-        start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        start_at = datetime.now(UTC).replace(tzinfo=None)

         yield IterationRunStartedEvent(
             iteration_id=self.id,

@@ -1,11 +1,11 @@
 from collections.abc import Sequence
-from enum import Enum
+from enum import StrEnum
 from typing import Optional

 from core.workflow.nodes.base import BaseNodeData


-class WriteMode(str, Enum):
+class WriteMode(StrEnum):
     OVER_WRITE = "over-write"
     APPEND = "append"
     CLEAR = "clear"

@@ -33,7 +33,7 @@ def handle(sender, **kwargs):
            raise NotFound("Document not found")

        document.indexing_status = "parsing"
-       document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+       document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        documents.append(document)
        db.session.add(document)
    db.session.commit()

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity
 from events.message_event import message_was_created
@@ -17,5 +17,5 @@ def handle(sender, **kwargs):
     db.session.query(Provider).filter(
         Provider.tenant_id == application_generate_entity.app_config.tenant_id,
         Provider.provider_name == application_generate_entity.model_conf.provider,
-    ).update({"last_used": datetime.now(timezone.utc).replace(tzinfo=None)})
+    ).update({"last_used": datetime.now(UTC).replace(tzinfo=None)})
     db.session.commit()

@@ -1,5 +1,5 @@
 from collections.abc import Generator
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta

 from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas

@@ -67,7 +67,7 @@ class AzureBlobStorage(BaseStorage):
             account_key=self.account_key,
             resource_types=ResourceTypes(service=True, container=True, object=True),
             permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
-            expiry=datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(hours=1),
+            expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
         )
         redis_client.set(cache_key, sas_token, ex=3000)
         return BlobServiceClient(account_url=self.account_url, credential=sas_token)

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum


-class StorageType(str, Enum):
+class StorageType(StrEnum):
     ALIYUN_OSS = "aliyun-oss"
     AZURE_BLOB = "azure-blob"
     BAIDU_OBS = "baidu-obs"

@@ -70,7 +70,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -106,7 +106,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -141,7 +141,7 @@ class NotionOAuth(OAuthDataSource):
             }
             data_source_binding.source_info = new_source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             raise ValueError("Data source binding not found")

@@ -8,7 +8,7 @@ from extensions.ext_database import db
 from .types import StringUUID


-class AccountStatus(str, enum.Enum):
+class AccountStatus(enum.StrEnum):
     PENDING = "pending"
     UNINITIALIZED = "uninitialized"
     ACTIVE = "active"
@@ -121,12 +121,12 @@ class Account(UserMixin, db.Model):
         return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR


-class TenantStatus(str, enum.Enum):
+class TenantStatus(enum.StrEnum):
     NORMAL = "normal"
     ARCHIVE = "archive"


-class TenantAccountRole(str, enum.Enum):
+class TenantAccountRole(enum.StrEnum):
     OWNER = "owner"
     ADMIN = "admin"
     EDITOR = "editor"

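Because `enum.StrEnum` members are real `str` instances, comparisons against the raw status strings already stored in the database keep working unchanged; a quick check using the values from this hunk:

```python
import enum

class AccountStatus(enum.StrEnum):
    PENDING = "pending"
    ACTIVE = "active"

status_from_db = "pending"
assert status_from_db == AccountStatus.PENDING               # member == raw string
assert AccountStatus.PENDING.value == AccountStatus.PENDING  # .value comparisons stay valid
```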
@@ -23,7 +23,7 @@ from .model import App, Tag, TagBinding, UploadFile
 from .types import StringUUID


-class DatasetPermissionEnum(str, enum.Enum):
+class DatasetPermissionEnum(enum.StrEnum):
     ONLY_ME = "only_me"
     ALL_TEAM = "all_team_members"
     PARTIAL_TEAM = "partial_members"

@@ -1,16 +1,16 @@
-from enum import Enum
+from enum import StrEnum


-class CreatedByRole(str, Enum):
+class CreatedByRole(StrEnum):
     ACCOUNT = "account"
     END_USER = "end_user"


-class UserFrom(str, Enum):
+class UserFrom(StrEnum):
     ACCOUNT = "account"
     END_USER = "end-user"


-class WorkflowRunTriggeredFrom(str, Enum):
+class WorkflowRunTriggeredFrom(StrEnum):
     DEBUGGING = "debugging"
     APP_RUN = "app-run"

@@ -3,7 +3,7 @@ import re
 import uuid
 from collections.abc import Mapping
 from datetime import datetime
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Literal, Optional

 import sqlalchemy as sa
@@ -32,7 +32,7 @@ class DifySetup(db.Model):
     setup_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))


-class AppMode(str, Enum):
+class AppMode(StrEnum):
     COMPLETION = "completion"
     WORKFLOW = "workflow"
     CHAT = "chat"

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from celery import states

@@ -16,8 +16,8 @@ class CeleryTask(db.Model):
     result = db.Column(db.PickleType, nullable=True)
     date_done = db.Column(
         db.DateTime,
-        default=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
-        onupdate=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
+        default=lambda: datetime.now(UTC).replace(tzinfo=None),
+        onupdate=lambda: datetime.now(UTC).replace(tzinfo=None),
         nullable=True,
     )
     traceback = db.Column(db.Text, nullable=True)
@@ -37,4 +37,4 @@ class CeleryTaskSet(db.Model):
     id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
     taskset_id = db.Column(db.String(155), unique=True)
     result = db.Column(db.PickleType, nullable=True)
-    date_done = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc).replace(tzinfo=None), nullable=True)
+    date_done = db.Column(db.DateTime, default=lambda: datetime.now(UTC).replace(tzinfo=None), nullable=True)

@@ -1,6 +1,6 @@
 import json
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Any, Optional, Union

@@ -108,7 +108,7 @@ class Workflow(db.Model):
     )
     updated_by: Mapped[Optional[str]] = mapped_column(StringUUID)
     updated_at: Mapped[datetime] = mapped_column(
-        sa.DateTime, nullable=False, default=datetime.now(tz=timezone.utc), server_onupdate=func.current_timestamp()
+        sa.DateTime, nullable=False, default=datetime.now(tz=UTC), server_onupdate=func.current_timestamp()
     )
     _environment_variables: Mapped[str] = mapped_column(
         "environment_variables", db.Text, nullable=False, server_default="{}"

86 api/poetry.lock (generated)

@@ -114,7 +114,6 @@ files = [
 [package.dependencies]
 aiohappyeyeballs = ">=2.3.0"
 aiosignal = ">=1.1.2"
-async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
 attrs = ">=17.3.0"
 frozenlist = ">=1.1.1"
 multidict = ">=4.5,<7.0"
@@ -465,10 +464,8 @@ files = [
 ]

 [package.dependencies]
-exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
 idna = ">=2.8"
 sniffio = ">=1.1"
-typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}

 [package.extras]
 doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
@@ -501,9 +498,6 @@ files = [
     {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
 ]

-[package.dependencies]
-typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
-
 [package.extras]
 tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]

@@ -1073,10 +1067,8 @@ files = [

 [package.dependencies]
 colorama = {version = "*", markers = "os_name == \"nt\""}
-importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
 packaging = ">=19.1"
 pyproject_hooks = "*"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}

 [package.extras]
 docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
@@ -2397,20 +2389,6 @@ files = [
 [package.extras]
 tests = ["pytest"]

-[[package]]
-name = "exceptiongroup"
-version = "1.2.2"
-description = "Backport of PEP 654 (exception groups)"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
-    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
 [[package]]
 name = "fastapi"
 version = "0.115.4"
@@ -3045,10 +3023,7 @@ files = [

 [package.dependencies]
 cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""}
-greenlet = [
-    {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""},
-    {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.11\""},
-]
+greenlet = {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.11\""}
 "zope.event" = "*"
 "zope.interface" = "*"

@@ -3166,14 +3141,8 @@ files = [
 [package.dependencies]
 google-auth = ">=2.14.1,<3.0.dev0"
 googleapis-common-protos = ">=1.56.2,<2.0.dev0"
-grpcio = [
-    {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
-grpcio-status = [
-    {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
+grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
+grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
 proto-plus = ">=1.22.3,<2.0.0dev"
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
 requests = ">=2.18.0,<3.0.0.dev0"
@@ -5495,9 +5464,6 @@ files = [
     {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
 ]

-[package.dependencies]
-typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
-
 [[package]]
 name = "multiprocess"
 version = "0.70.17"
@@ -6370,7 +6336,6 @@ bottleneck = {version = ">=1.3.6", optional = true, markers = "extra == \"performance\""}
 numba = {version = ">=0.56.4", optional = true, markers = "extra == \"performance\""}
 numexpr = {version = ">=2.8.4", optional = true, markers = "extra == \"performance\""}
 numpy = [
-    {version = ">=1.22.4", markers = "python_version < \"3.11\""},
     {version = ">=1.23.2", markers = "python_version == \"3.11\""},
     {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
 ]
@@ -6654,7 +6619,6 @@ files = [
 deprecation = ">=2.1.0,<3.0.0"
 httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
 pydantic = ">=1.9,<3.0"
-strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}

 [[package]]
 name = "posthog"
@@ -7385,9 +7349,6 @@ files = [
     {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"},
 ]

-[package.dependencies]
-typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
-
 [package.extras]
 crypto = ["cryptography"]
 cryptodome = ["PyCryptodome"]
@@ -7476,11 +7437,9 @@ files = [

 [package.dependencies]
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
 pluggy = ">=1.5,<2"
-tomli = {version = ">=1", markers = "python_version < \"3.11\""}

 [package.extras]
 dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
@@ -7518,7 +7477,6 @@ files = [

 [package.dependencies]
 pytest = ">=8.3.3"
-tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}

 [package.extras]
 testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"]
@@ -8333,7 +8291,6 @@ files = [
 [package.dependencies]
 markdown-it-py = ">=2.2.0"
 pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}

 [package.extras]
 jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -8740,11 +8697,6 @@ files = [
     {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"},
     {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"},
     {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"},
-    {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"},
-    {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"},
-    {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"},
-    {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"},
-    {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"},
     {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"},
     {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"},
     {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"},
@@ -9183,22 +9135,6 @@ httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
 python-dateutil = ">=2.8.2,<3.0.0"
 typing-extensions = ">=4.2.0,<5.0.0"

-[[package]]
-name = "strenum"
-version = "0.4.15"
-description = "An Enum that inherits from str."
-optional = false
-python-versions = "*"
-files = [
-    {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
-    {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
-]
-
-[package.extras]
-docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
-release = ["twine"]
-test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
-
 [[package]]
 name = "strictyaml"
 version = "1.7.3"
@@ -9605,17 +9541,6 @@ files = [
     {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
 ]

-[[package]]
-name = "tomli"
-version = "2.0.2"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
-    {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
-]
-
 [[package]]
 name = "tos"
 version = "2.7.2"
@@ -10035,7 +9960,6 @@ h11 = ">=0.8"
 httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
 python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
 pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
 uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
 watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
 websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
@@ -11004,5 +10928,5 @@ cffi = ["cffi (>=1.11)"]

 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.10,<3.13"
-content-hash = "f20bd678044926913dbbc24bd0cf22503a75817aa55f59457ff7822032139b77"
+python-versions = ">=3.11,<3.13"
+content-hash = "96e2529374144624230bceff43f037d6988d6b0de99bbbcc7ee4b2085eb493fa"

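With 3.10 out of the supported range, every `python_version < "3.11"` dependency marker becomes dead, and backport-only packages such as `exceptiongroup`, `strenum`, and `tomli` fall out of the lock entirely. A minimal sketch that checks the regenerated lock, assuming it runs from the repository root (`tomllib` is itself stdlib only since 3.11):

```python
import tomllib  # stdlib TOML parser, available since Python 3.11

with open("api/poetry.lock", "rb") as f:
    lock = tomllib.load(f)

names = {pkg["name"] for pkg in lock["package"]}
for backport in ("async-timeout", "exceptiongroup", "strenum", "tomli"):
    print(backport, "still locked" if backport in names else "removed")
```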
@@ -1,5 +1,5 @@
 [project]
-requires-python = ">=3.10,<3.13"
+requires-python = ">=3.11,<3.13"

 [build-system]
 requires = ["poetry-core"]
@@ -161,7 +161,7 @@ pydantic-settings = "~2.6.0"
 pydantic_extra_types = "~2.9.0"
 pyjwt = "~2.8.0"
 pypdfium2 = "~4.17.0"
-python = ">=3.10,<3.13"
+python = ">=3.11,<3.13"
 python-docx = "~1.1.0"
 python-dotenv = "1.0.0"
 pyyaml = "~6.0.1"

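`requires-python = ">=3.11,<3.13"` states the supported interpreters declaratively; an equivalent runtime guard (illustrative only, not part of this diff) would be:

```python
import sys

if not ((3, 11) <= sys.version_info[:2] < (3, 13)):
    raise SystemExit(f"Python 3.11 or 3.12 required, found {sys.version.split()[0]}")
```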
@@ -4,7 +4,7 @@ import logging
 import random
 import secrets
 import uuid
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from hashlib import sha256
 from typing import Any, Optional

@@ -115,15 +115,15 @@ class AccountService:
         available_ta.current = True
         db.session.commit()

-        if datetime.now(timezone.utc).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
-            account.last_active_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
+            account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()

         return account

     @staticmethod
     def get_account_jwt_token(account: Account) -> str:
-        exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
+        exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
         exp = int(exp_dt.timestamp())
         payload = {
             "user_id": account.id,
@@ -160,7 +160,7 @@ class AccountService:

         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

         db.session.commit()

@@ -253,7 +253,7 @@ class AccountService:
             # If it exists, update the record
             account_integrate.open_id = open_id
             account_integrate.encrypted_token = ""  # todo
-            account_integrate.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
         else:
             # If it does not exist, create a new record
             account_integrate = AccountIntegrate(
@@ -288,7 +288,7 @@ class AccountService:
     @staticmethod
     def update_login_info(account: Account, *, ip_address: str) -> None:
         """Update last login time and ip"""
-        account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
         account.last_login_ip = ip_address
         db.session.add(account)
         db.session.commit()
@@ -765,7 +765,7 @@ class RegisterService:
         )

         account.last_login_ip = ip_address
-        account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

         TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)

@@ -805,7 +805,7 @@ class RegisterService:
             is_setup=is_setup,
         )
         account.status = AccountStatus.ACTIVE.value if not status else status.value
-        account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

         if open_id is not None or provider is not None:
             AccountService.link_account_integrate(provider, open_id, account)

@@ -429,7 +429,7 @@ class AppAnnotationService:
             raise NotFound("App annotation not found")
         annotation_setting.score_threshold = args["score_threshold"]
         annotation_setting.updated_user_id = current_user.id
-        annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(annotation_setting)
         db.session.commit()

@@ -1,6 +1,6 @@
 import json
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import cast

 from flask_login import current_user
@@ -223,7 +223,7 @@ class AppService:
         app.icon_background = args.get("icon_background")
         app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         if app.max_active_requests is not None:
@@ -240,7 +240,7 @@ class AppService:
         """
         app.name = name
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -256,7 +256,7 @@ class AppService:
         app.icon = icon
         app.icon_background = icon_background
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -273,7 +273,7 @@ class AppService:

         app.enable_site = enable_site
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -290,7 +290,7 @@ class AppService:

         app.enable_api = enable_api
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         return app

@@ -1,6 +1,6 @@
-from enum import Enum
+from enum import StrEnum


-class AuthType(str, Enum):
+class AuthType(StrEnum):
     FIRECRAWL = "firecrawl"
     JINA = "jinareader"

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union

 from sqlalchemy import asc, desc, or_
@@ -104,7 +104,7 @@ class ConversationService:
             return cls.auto_generate_name(app_model, conversation)
         else:
             conversation.name = name
-            conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()

         return conversation

@@ -600,7 +600,7 @@ class DocumentService:
         # update document to be paused
         document.is_paused = True
         document.paused_by = current_user.id
-        document.paused_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)

         db.session.add(document)
         db.session.commit()
@@ -1072,7 +1072,7 @@ class DocumentService:
             document.parsing_completed_at = None
             document.cleaning_completed_at = None
             document.splitting_completed_at = None
-            document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             document.created_from = created_from
             document.doc_form = document_data["doc_form"]
             db.session.add(document)
@@ -1409,8 +1409,8 @@ class SegmentService:
                word_count=len(content),
                tokens=tokens,
                status="completed",
-               indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
-               completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+               indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+               completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                created_by=current_user.id,
            )
            if document.doc_form == "qa_model":
@@ -1429,7 +1429,7 @@ class SegmentService:
        except Exception as e:
            logging.exception("create segment index failed")
            segment_document.enabled = False
-           segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+           segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            segment_document.status = "error"
            segment_document.error = str(e)
            db.session.commit()
@@ -1481,8 +1481,8 @@ class SegmentService:
                word_count=len(content),
                tokens=tokens,
                status="completed",
-               indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
-               completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+               indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+               completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                created_by=current_user.id,
            )
            if document.doc_form == "qa_model":
@@ -1508,7 +1508,7 @@ class SegmentService:
            logging.exception("create segment index failed")
            for segment_document in segment_data_list:
                segment_document.enabled = False
-               segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+               segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                segment_document.status = "error"
                segment_document.error = str(e)
            db.session.commit()
@@ -1526,7 +1526,7 @@ class SegmentService:
        if segment.enabled != action:
            if not action:
                segment.enabled = action
-               segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+               segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                segment.disabled_by = current_user.id
                db.session.add(segment)
                db.session.commit()
@@ -1585,10 +1585,10 @@ class SegmentService:
            segment.word_count = len(content)
            segment.tokens = tokens
            segment.status = "completed"
-           segment.indexing_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
-           segment.completed_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+           segment.indexing_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+           segment.completed_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            segment.updated_by = current_user.id
-           segment.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+           segment.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            segment.enabled = True
            segment.disabled_at = None
            segment.disabled_by = None
@@ -1608,7 +1608,7 @@ class SegmentService:
        except Exception as e:
            logging.exception("update segment index failed")
            segment.enabled = False
-           segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+           segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
            segment.status = "error"
            segment.error = str(e)
            db.session.commit()

@@ -1,6 +1,6 @@
 import json
 from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Optional, Union

 import httpx
@@ -99,7 +99,7 @@ class ExternalDatasetService:
         external_knowledge_api.description = args.get("description", "")
         external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
         external_knowledge_api.updated_by = user_id
-        external_knowledge_api.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()

         return external_knowledge_api

@@ -77,7 +77,7 @@ class FileService:
            mime_type=mimetype,
            created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
            created_by=user.id,
-           created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+           created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            used=False,
            hash=hashlib.sha3_256(content).hexdigest(),
            source_url=source_url,
@@ -123,10 +123,10 @@ class FileService:
            mime_type="text/plain",
            created_by=current_user.id,
            created_by_role=CreatedByRole.ACCOUNT,
-           created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+           created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            used=True,
            used_by=current_user.id,
-           used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+           used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
        )

        db.session.add(upload_file)

@@ -371,7 +371,7 @@ class ModelLoadBalancingService:

         load_balancing_config.name = name
         load_balancing_config.enabled = enabled
-        load_balancing_config.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        load_balancing_config.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.commit()

         self._clear_credentials_cache(tenant_id, config_id)

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum


-class RecommendAppType(str, Enum):
+class RecommendAppType(StrEnum):
     REMOTE = "remote"
     BUILDIN = "builtin"
     DATABASE = "db"

@@ -1,7 +1,7 @@
 import json
 import time
 from collections.abc import Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional

 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
@@ -115,7 +115,7 @@ class WorkflowService:
         workflow.graph = json.dumps(graph)
         workflow.features = json.dumps(features)
         workflow.updated_by = account.id
-        workflow.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
         workflow.environment_variables = environment_variables
         workflow.conversation_variables = conversation_variables

@@ -148,7 +148,7 @@ class WorkflowService:
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            type=draft_workflow.type,
-           version=str(datetime.now(timezone.utc).replace(tzinfo=None)),
+           version=str(datetime.now(UTC).replace(tzinfo=None)),
            graph=draft_workflow.graph,
            features=draft_workflow.features,
            created_by=account.id,
@@ -257,8 +257,8 @@ class WorkflowService:
         workflow_node_execution.elapsed_time = time.perf_counter() - start_at
         workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value
         workflow_node_execution.created_by = account.id
-        workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
-        workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)

         if run_succeeded and node_run_result:
             # create workflow node execution

@@ -74,7 +74,7 @@ def add_document_to_index_task(dataset_document_id: str):
    except Exception as e:
        logging.exception("add document to index failed")
        dataset_document.enabled = False
-       dataset_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+       dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        dataset_document.status = "error"
        dataset_document.error = str(e)
        db.session.commit()

@@ -52,7 +52,7 @@ def enable_annotation_reply_task(
     annotation_setting.score_threshold = score_threshold
     annotation_setting.collection_binding_id = dataset_collection_binding.id
     annotation_setting.updated_user_id = user_id
-    annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     db.session.add(annotation_setting)
 else:
     new_app_annotation_setting = AppAnnotationSetting(
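The task modules below import the module rather than the names, so there the rename reads `datetime.datetime.now(datetime.UTC)`. It is the same alias, reached as a module attribute; a sketch assuming Python 3.11+:

```python
import datetime

# With `import datetime`, the 3.11+ alias is an attribute of the module.
assert datetime.UTC is datetime.timezone.utc

now_naive = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
print(now_naive)
```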
@@ -80,9 +80,9 @@ def batch_create_segment_to_index_task(
     word_count=len(content),
     tokens=tokens,
     created_by=user_id,
-    indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+    indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
     status="completed",
-    completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+    completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
 )
 if dataset_document.doc_form == "qa_model":
     segment_document.answer = segment["answer"]
@@ -38,7 +38,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
 # update segment status to indexing
 update_params = {
     DocumentSegment.status: "indexing",
-    DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+    DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
 }
 DocumentSegment.query.filter_by(id=segment.id).update(update_params)
 db.session.commit()
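The `update_params` dict in this hunk is keyed by mapped columns, not strings: SQLAlchemy's `Query.update()` accepts column objects as keys for a bulk UPDATE. A minimal self-contained sketch of that pattern with plain SQLAlchemy (the table and values here are illustrative, not Dify's real schema):

```python
import datetime

from sqlalchemy import Column, DateTime, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DocumentSegment(Base):
    __tablename__ = "document_segments"
    id = Column(String, primary_key=True)
    status = Column(String)
    indexing_at = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(DocumentSegment(id="seg-1", status="waiting"))
    session.commit()
    # Column objects as dict keys, mirroring update_params in the hunk above.
    session.query(DocumentSegment).filter_by(id="seg-1").update(
        {
            DocumentSegment.status: "indexing",
            DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
        }
    )
    session.commit()
```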
@@ -75,7 +75,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
 # update segment to completed
 update_params = {
     DocumentSegment.status: "completed",
-    DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+    DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
 }
 DocumentSegment.query.filter_by(id=segment.id).update(update_params)
 db.session.commit()
@@ -87,7 +87,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
 except Exception as e:
     logging.exception("create segment to index failed")
     segment.enabled = False
-    segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     segment.status = "error"
     segment.error = str(e)
     db.session.commit()
@@ -67,7 +67,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
 # check the page is updated
 if last_edited_time != page_edited_time:
     document.indexing_status = "parsing"
-    document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     db.session.commit()

     # delete all document segment and index
@@ -50,7 +50,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
 if document:
     document.indexing_status = "error"
     document.error = str(e)
-    document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     db.session.add(document)
     db.session.commit()
 return
@@ -64,7 +64,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):

 if document:
     document.indexing_status = "parsing"
-    document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     documents.append(document)
     db.session.add(document)
 db.session.commit()
@@ -30,7 +30,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
     raise NotFound("Document not found")

 document.indexing_status = "parsing"
-document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
 db.session.commit()

 # delete all document segment and index
@@ -71,7 +71,7 @@ def enable_segment_to_index_task(segment_id: str):
 except Exception as e:
     logging.exception("enable segment to index failed")
     segment.enabled = False
-    segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     segment.status = "error"
     segment.error = str(e)
     db.session.commit()
@@ -1,6 +1,6 @@
 import uuid
 from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime, timezone

 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import SystemVariableKey
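Because `UTC` only exists in the `datetime` module from Python 3.11 onward, this changed import line is effectively the version gate for the whole change. Purely for illustration (not part of this diff), a compatibility shim for code that must still run on 3.10 could look like:

```python
try:
    from datetime import UTC  # Python 3.11+
except ImportError:  # Python <= 3.10
    from datetime import timezone

    UTC = timezone.utc  # the same object under its pre-3.11 spelling
```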
@@ -29,7 +29,7 @@ def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngine


 def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
-    route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+    route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(UTC).replace(tzinfo=None))

     parallel_id = graph.node_parallel_mapping.get(next_node_id)
     parallel_start_node_id = None
@@ -68,7 +68,7 @@ def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEve
 )

 route_node_state.status = RouteNodeState.Status.SUCCESS
-route_node_state.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+route_node_state.finished_at = datetime.now(UTC).replace(tzinfo=None)
 yield NodeRunSucceededEvent(
     id=node_execution_id,
     node_id=next_node_id,