add message clean task

Repository: https://github.com/langgenius/dify
Commit: a4ab8f225c (parent: 16b9665033)
@@ -616,6 +616,11 @@ class DataSetConfig(BaseSettings):
         default=False,
     )
 
+    PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
+        description="Interval in days for message cleanup operations - plan: sandbox",
+        default=30,
+    )
+
 
 class WorkspaceConfig(BaseSettings):
     """
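dify's config classes are pydantic-settings models, so a field like this can be overridden through an environment variable of the same name. A minimal, self-contained sketch of that resolution; DemoConfig is a stand-in, not dify's actual settings tree:

# Sketch of how a pydantic-settings field like the one above resolves;
# assumes pydantic and pydantic-settings are installed.
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class DemoConfig(BaseSettings):
    PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for message cleanup operations - plan: sandbox",
        default=30,
    )


os.environ["PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING"] = "7"
print(DemoConfig().PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING)  # 7; 30 if the env var is unset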
@@ -68,6 +68,7 @@ def init_app(app: Flask) -> Celery:
         "schedule.clean_unused_datasets_task",
         "schedule.create_tidb_serverless_task",
         "schedule.update_tidb_serverless_status_task",
+        "schedule.clean_messages",
     ]
     day = dify_config.CELERY_BEAT_SCHEDULER_TIME
     beat_schedule = {
@@ -87,6 +88,10 @@ def init_app(app: Flask) -> Celery:
             "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task",
             "schedule": crontab(minute="30", hour="*"),
         },
+        "clean_messages": {
+            "task": "schedule.clean_messages.clean_messages",
+            "schedule": timedelta(days=day),
+        },
     }
     celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
 
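The new entry registers the task with Celery beat on a timedelta interval; note it reuses CELERY_BEAT_SCHEDULER_TIME (in days) as the run frequency. A runnable sketch of the same pattern, assuming Celery with a local Redis broker; the broker URL and module layout are placeholders, not dify's configuration:

# Standalone sketch of Celery beat scheduling on a timedelta interval.
from datetime import timedelta

from celery import Celery

celery_app = Celery("demo", broker="redis://localhost:6379/0")


@celery_app.task(name="schedule.clean_messages.clean_messages")
def clean_messages():
    print("cleaning...")


celery_app.conf.beat_schedule = {
    "clean_messages": {
        "task": "schedule.clean_messages.clean_messages",
        "schedule": timedelta(days=1),  # beat enqueues the task once per interval
    },
}
# Start an embedded beat alongside a worker with: celery -A <module> worker -B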
@@ -719,6 +719,7 @@ class Message(db.Model):
         db.Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
         db.Index("message_account_idx", "app_id", "from_source", "from_account_id"),
         db.Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
+        db.Index("message_created_at_idx", "created_at"),
     )
 
     id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
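The new index supports the cleanup query in the file below, which is a pure range filter plus ORDER BY on created_at; without it, each pass would scan the messages table. A small SQLAlchemy sketch of that query shape; Msg is a stand-in model, not dify's Message:

# Why a btree index on created_at helps: the cleanup query is a range scan
# plus ORDER BY on that single column.
import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Msg(Base):
    __tablename__ = "messages_demo"
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, index=True)  # analogous to message_created_at_idx


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
stmt = select(Msg).where(Msg.created_at < cutoff).order_by(Msg.created_at.desc())
print(stmt)  # the WHERE + ORDER BY pair the new index serves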
api/schedule/clean_messages.py (new file, 64 lines)
@@ -0,0 +1,64 @@
+import datetime
+import time
+
+import click
+
+import app
+from configs import dify_config
+from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService
+from extensions.ext_database import db
+from models.account import Tenant
+from models.model import App, Message, MessageAgentThought, MessageAnnotation, MessageChain, MessageFeedback, MessageFile
+from models.web import SavedMessage
+from services.feature_service import FeatureService
+from extensions.ext_redis import redis_client
+from werkzeug.exceptions import NotFound
+
+@app.celery.task(queue="dataset")
+def clean_messages():
+    click.echo(click.style("Start clean messages.", fg="green"))
+    start_at = time.perf_counter()
+    plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta(days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING)
+    page = 1
+    while True:
+        try:
+            # Main query with join and filter
+            messages = (
+                db.session.query(Message)
+                .filter(Message.created_at < plan_sandbox_clean_message_day)
+                .order_by(Message.created_at.desc())
+                .paginate(page=page, per_page=100)
+            )
+
+        except NotFound:
+            break
+        if messages.items is None or len(messages.items) == 0:
+            break
+        for message in messages.items:
+            app = App.query.filter_by(id=message.app_id).first()
+            features_cache_key = f"features:{app.tenant_id}"
+            plan_cache = redis_client.get(features_cache_key)
+            if plan_cache is None:
+                features = FeatureService.get_features(app.tenant_id)
+                redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
+                plan = features.billing.subscription.plan
+            else:
+                plan = plan_cache.decode()
+            if plan == "sandbox":
+                # clean related message
+                db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message.id).delete(
+                    synchronize_session=False
+                )
+                db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message.id).delete(
+                    synchronize_session=False
+                )
+                db.session.query(MessageChain).filter(MessageChain.message_id == message.id).delete(synchronize_session=False)
+                db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message.id).delete(
+                    synchronize_session=False
+                )
+                db.session.query(MessageFile).filter(MessageFile.message_id == message.id).delete(synchronize_session=False)
+                db.session.query(SavedMessage).filter(SavedMessage.message_id == message.id).delete(synchronize_session=False)
+                db.session.query(Message).filter(Message.id == message.id).delete()
+                db.session.commit()
+    end_at = time.perf_counter()
+    click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green"))
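One behavioral note on the new task: page is initialized but never advanced, so every iteration re-reads page 1. That terminates only because sandbox-plan messages get deleted out of the result set; a page 1 consisting solely of non-sandbox messages would be fetched indefinitely. A toy model of that dynamic, using plain lists rather than dify's models:

# Toy model (plain lists, not dify code) of the fixed-page-1 drain pattern:
# deleting matched rows refills page 1, so advancing the page would skip rows,
# but a page of only unmatched rows would make the real loop spin forever.
rows = ["sandbox"] * 5 + ["pro"] * 2

passes = 0
while passes < 10:  # guard so this demo halts even in the stuck case
    passes += 1
    page1 = rows[:4]  # mirrors paginate(page=1, per_page=4)
    if not page1:
        break
    survivors = [r for r in page1 if r != "sandbox"]
    rows = survivors + rows[4:]  # "sandbox" rows deleted; others stay on page 1
    if page1 == survivors:
        print("page 1 stopped shrinking; the real loop would spin here")
        break

print(rows)  # ['pro', 'pro'] survive and pin page 1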
@@ -22,7 +22,6 @@ def clean_unused_datasets_task():
     start_at = time.perf_counter()
     plan_sandbox_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_sandbox_clean_day_setting)
     plan_pro_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_pro_clean_day_setting)
-    page = 1
     while True:
         try:
             # Subquery for counting new documents
@@ -62,14 +61,13 @@ def clean_unused_datasets_task():
                     func.coalesce(document_subquery_old.c.document_count, 0) > 0,
                 )
                 .order_by(Dataset.created_at.desc())
-                .paginate(page=page, per_page=50)
+                .paginate(page=1, per_page=50)
             )
 
         except NotFound:
             break
         if datasets.items is None or len(datasets.items) == 0:
             break
-        page += 1
         for dataset in datasets:
             dataset_query = (
                 db.session.query(DatasetQuery)
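Both cleanup loops lean on Flask-SQLAlchemy's pagination contract: with the default error_out=True, paginate() aborts with a 404 when the requested page is out of range, which surfaces as werkzeug's NotFound; catching it is the loops' exit path. A self-contained sketch, assuming Flask and Flask-SQLAlchemy are installed; the model and database URL are stand-ins:

# Demonstrates the NotFound-on-out-of-range-page behavior the loops catch.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from werkzeug.exceptions import NotFound

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(app)


class Item(db.Model):
    id = db.Column(db.Integer, primary_key=True)


with app.app_context():
    db.create_all()
    try:
        db.session.query(Item).paginate(page=2, per_page=50)  # empty table, page 2
    except NotFound:
        print("out-of-range page -> abort(404) -> NotFound, which ends the loop")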
@@ -92,7 +90,6 @@ def clean_unused_datasets_task():
             click.echo(
                 click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
             )
-    page = 1
     while True:
         try:
             # Subquery for counting new documents
@@ -132,14 +129,13 @@ def clean_unused_datasets_task():
                     func.coalesce(document_subquery_old.c.document_count, 0) > 0,
                 )
                 .order_by(Dataset.created_at.desc())
-                .paginate(page=page, per_page=50)
+                .paginate(page=1, per_page=50)
             )
 
         except NotFound:
             break
         if datasets.items is None or len(datasets.items) == 0:
             break
-        page += 1
         for dataset in datasets:
             dataset_query = (
                 db.session.query(DatasetQuery)
@@ -149,11 +145,13 @@ def clean_unused_datasets_task():
             if not dataset_query or len(dataset_query) == 0:
                 try:
                     features_cache_key = f"features:{dataset.tenant_id}"
-                    plan = redis_client.get(features_cache_key)
-                    if plan is None:
+                    plan_cache = redis_client.get(features_cache_key)
+                    if plan_cache is None:
                         features = FeatureService.get_features(dataset.tenant_id)
                         redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
                         plan = features.billing.subscription.plan
+                    else:
+                        plan = plan_cache.decode()
                     if plan == "sandbox":
                         # remove index
                         index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
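This last hunk fixes a subtle bug alongside the rename: redis_client.get() returns bytes, so on a cache hit the old code compared b"sandbox" against the string "sandbox" and never matched; the new else branch decodes first. A quick redis-py demonstration, assuming a Redis server on localhost; the key is illustrative:

# Shows why the decode matters: redis-py returns bytes by default, and
# bytes never compare equal to str.
import redis

r = redis.Redis()  # assumes localhost:6379
r.setex("features:tenant-demo", 600, "sandbox")

plan_cache = r.get("features:tenant-demo")
print(plan_cache == "sandbox")           # False: b"sandbox" != "sandbox"
print(plan_cache.decode() == "sandbox")  # True once decoded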