import type { AgentStrategy, ModelModeType, RETRIEVE_TYPE, ToolItem, TtsAutoPlay } from '@/types/app'
import type {
  RerankingModeEnum,
} from '@/models/datasets'
import type { FileUpload } from '@/app/components/base/features/types'

export type Inputs = Record<string, string | number | object>

export enum PromptMode {
  simple = 'simple',
  advanced = 'advanced',
}

export interface PromptItem {
  role?: PromptRole
  text: string
}

export interface ChatPromptConfig {
  prompt: PromptItem[]
}

export interface ConversationHistoriesRole {
  user_prefix: string
  assistant_prefix: string
}

export interface CompletionPromptConfig {
  prompt: PromptItem
  conversation_histories_role: ConversationHistoriesRole
}

export interface BlockStatus {
  context: boolean
  history: boolean
  query: boolean
}

export enum PromptRole {
  system = 'system',
  user = 'user',
  assistant = 'assistant',
}
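
// Illustrative sketch only (not referenced by the app): how PromptItem and
// PromptRole compose into a ChatPromptConfig. The prompt texts are made-up
// placeholders for the example.
export const exampleChatPromptConfig: ChatPromptConfig = {
  prompt: [
    { role: PromptRole.system, text: 'You are a helpful assistant.' },
    { role: PromptRole.user, text: 'Summarize the following text.' },
  ],
}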

export interface PromptVariable {
  key: string
  name: string
  type: string // "string" | "number" | "select"
  default?: string | number
  required?: boolean
  options?: string[]
  max_length?: number
  is_context_var?: boolean
  enabled?: boolean
  config?: Record<string, any>
  icon?: string
  icon_background?: string
}

export interface CompletionParams {
  max_tokens: number
  temperature: number
  top_p: number
  presence_penalty: number
  frequency_penalty: number
  stop?: string[]
}
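
// Illustrative sketch only: a CompletionParams payload. The numbers are
// arbitrary values chosen for the example, not app defaults.
export const exampleCompletionParams: CompletionParams = {
  max_tokens: 512,
  temperature: 0.7,
  top_p: 1,
  presence_penalty: 0,
  frequency_penalty: 0,
  stop: ['\n\nHuman:'],
}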

export type ModelId = 'gpt-3.5-turbo' | 'text-davinci-003'

export interface PromptConfig {
  prompt_template: string
  prompt_variables: PromptVariable[]
}
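
// Illustrative sketch only: a PromptConfig whose template references a single
// variable. The "{{book}}" placeholder syntax and the variable fields shown
// here are assumptions for the example, not app defaults.
export const examplePromptConfig: PromptConfig = {
  prompt_template: 'Summarize the plot of {{book}} in three sentences.',
  prompt_variables: [
    {
      key: 'book',
      name: 'Book title',
      type: 'string',
      required: true,
      max_length: 48,
    },
  ],
}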

export interface MoreLikeThisConfig {
  enabled: boolean
}

export type SuggestedQuestionsAfterAnswerConfig = MoreLikeThisConfig

export type SpeechToTextConfig = MoreLikeThisConfig

export interface TextToSpeechConfig {
  enabled: boolean
  voice?: string
  language?: string
  autoPlay?: TtsAutoPlay
}

export type CitationConfig = MoreLikeThisConfig

export interface AnnotationReplyConfig {
  id: string
  enabled: boolean
  score_threshold: number
  embedding_model: {
    embedding_provider_name: string
    embedding_model_name: string
  }
}
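
// Illustrative sketch only: an enabled AnnotationReplyConfig. The id, model
// names and threshold are placeholder values.
export const exampleAnnotationReplyConfig: AnnotationReplyConfig = {
  id: 'annotation-setting-id',
  enabled: true,
  score_threshold: 0.8,
  embedding_model: {
    embedding_provider_name: 'openai',
    embedding_model_name: 'text-embedding-3-small',
  },
}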

export interface ModerationContentConfig {
  enabled: boolean
  preset_response?: string
}

export type ModerationConfig = MoreLikeThisConfig & {
  type?: string
  config?: {
    keywords?: string
    api_based_extension_id?: string
    inputs_config?: ModerationContentConfig
    outputs_config?: ModerationContentConfig
  } & Partial<Record<string, any>>
}
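
// Illustrative sketch only: a keyword-based ModerationConfig that screens both
// inputs and outputs. The type value, keywords and responses are placeholders.
export const exampleModerationConfig: ModerationConfig = {
  enabled: true,
  type: 'keywords',
  config: {
    keywords: 'password\napi key',
    inputs_config: { enabled: true, preset_response: 'Your input was blocked.' },
    outputs_config: { enabled: true, preset_response: 'The answer was blocked.' },
  },
}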

export type RetrieverResourceConfig = MoreLikeThisConfig

export interface AgentConfig {
  enabled: boolean
  strategy: AgentStrategy
  max_iteration: number
  tools: ToolItem[]
}

// Frontend-only shape; not the same as the backend model config.
export interface ModelConfig {
  provider: string // LLM provider, e.g. "OPENAI"
  model_id: string
  mode: ModelModeType
  configs: PromptConfig
  opening_statement: string | null
  more_like_this: MoreLikeThisConfig | null
  suggested_questions: string[] | null
  suggested_questions_after_answer: SuggestedQuestionsAfterAnswerConfig | null
  speech_to_text: SpeechToTextConfig | null
  text_to_speech: TextToSpeechConfig | null
  file_upload: FileUpload | null
  retriever_resource: RetrieverResourceConfig | null
  sensitive_word_avoidance: ModerationConfig | null
  annotation_reply: AnnotationReplyConfig | null
  dataSets: any[]
  agentConfig: AgentConfig
}

export interface DatasetConfigItem {
  enable: boolean
  value: number
}

export interface DatasetConfigs {
  retrieval_model: RETRIEVE_TYPE
  reranking_model: {
    reranking_provider_name: string
    reranking_model_name: string
  }
  top_k: number
  score_threshold_enabled: boolean
  score_threshold: number | null | undefined
  datasets: {
    datasets: {
      enabled: boolean
      id: string
    }[]
  }
  reranking_mode?: RerankingModeEnum
  weights?: {
    vector_setting: {
      vector_weight: number
      embedding_provider_name: string
      embedding_model_name: string
    }
    keyword_setting: {
      keyword_weight: number
    }
  }
  reranking_enable?: boolean
}
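
// Illustrative sketch only: the shape of DatasetConfigs minus the enum-typed
// fields (retrieval_model and reranking_mode come from enums defined outside
// this file, so they are omitted here). All ids, names and numbers are
// placeholders.
export const exampleDatasetConfigs: Omit<DatasetConfigs, 'retrieval_model' | 'reranking_mode'> = {
  reranking_model: {
    reranking_provider_name: 'cohere',
    reranking_model_name: 'rerank-english-v3.0',
  },
  top_k: 4,
  score_threshold_enabled: true,
  score_threshold: 0.5,
  datasets: {
    datasets: [
      { enabled: true, id: 'dataset-id-1' },
      { enabled: false, id: 'dataset-id-2' },
    ],
  },
  weights: {
    vector_setting: {
      vector_weight: 0.7,
      embedding_provider_name: 'openai',
      embedding_model_name: 'text-embedding-3-small',
    },
    keyword_setting: {
      keyword_weight: 0.3,
    },
  },
  reranking_enable: true,
}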

export interface DebugRequestBody {
  inputs: Inputs
  query: string
  completion_params: CompletionParams
  model_config: ModelConfig
}

export interface DebugResponse {
  id: string
  answer: string
  created_at: string
}

export interface DebugResponseStream {
  id: string
  data: string
  created_at: string
}

export interface FeedBackRequestBody {
  message_id: string
  rating: 'like' | 'dislike'
  content?: string
  from_source: 'api' | 'log'
}
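
// Illustrative sketch only: a "like" feedback payload submitted from the logs
// view. The id and content are placeholders.
export const exampleFeedBackRequestBody: FeedBackRequestBody = {
  message_id: 'message-id',
  rating: 'like',
  content: 'Accurate and concise answer.',
  from_source: 'log',
}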

export interface FeedBackResponse {
  message_id: string
  rating: 'like' | 'dislike'
}

// Log session list
export interface LogSessionListQuery {
  keyword?: string
  start?: string // datetime in "YYYY-MM-DD HH:mm" format
  end?: string // datetime in "YYYY-MM-DD HH:mm" format
  page: number
  limit: number // default 20; range 1-100
}
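
// Illustrative sketch only: querying the second page of log sessions within a
// date range, 20 items per page.
export const exampleLogSessionListQuery: LogSessionListQuery = {
  keyword: 'refund',
  start: '2024-01-01 00:00',
  end: '2024-01-31 23:59',
  page: 2,
  limit: 20,
}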

export interface LogSessionListResponse {
  data: {
    id: string
    conversation_id: string
    query: string // the user's query
    message: string // prompt sent to the LLM
    answer: string
    created_at: string
  }[]
  total: number
  page: number
}

// Log session detail and debug
export interface LogSessionDetailResponse {
  id: string
  conversation_id: string
  model_provider: string
  query: string
  inputs: Record<string, string | number | object>[]
  message: string
  message_tokens: number // number of tokens in message
  answer: string
  answer_tokens: number // number of tokens in answer
  provider_response_latency: number // response latency in ms
  from_source: 'api' | 'log'
}

export interface SavedMessage {
  id: string
  answer: string
}