mirror of https://github.com/langgenius/dify.git (synced 2024-11-16 11:42:29 +08:00)

fix: httpx socks package missing (#1977)

parent 7a221d0858
commit 33901384c6
@@ -260,7 +260,7 @@ provider_credential_schema:
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_type=model_type,
 model_properties={
-    'mode': ModelType.LLM,
+    ModelPropertyKey.MODE: ModelType.LLM,
 },
 parameter_rules=rules
 )

@@ -40,8 +40,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(

@@ -84,8 +84,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 16385,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 16385,
 },
 parameter_rules=[
     ParameterRule(

@@ -128,8 +128,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 8192,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 8192,
 },
 parameter_rules=[
     ParameterRule(

@@ -202,8 +202,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 32768,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 32768,
 },
 parameter_rules=[
     ParameterRule(

@@ -276,8 +276,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 128000,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 128000,
 },
 parameter_rules=[
     ParameterRule(

@@ -349,8 +349,8 @@ LLM_BASE_MODELS = [
 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 128000,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 128000,
 },
 parameter_rules=[
     ParameterRule(

@@ -419,8 +419,8 @@ LLM_BASE_MODELS = [
 model_type=ModelType.LLM,
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.COMPLETION.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(

@@ -459,8 +459,8 @@ LLM_BASE_MODELS = [
 model_type=ModelType.LLM,
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.COMPLETION.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(

@@ -10,7 +10,7 @@ from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk,
 from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, \
     UserPromptMessage, SystemPromptMessage
 from core.model_runtime.entities.model_entities import ParameterRule, DefaultParameterName, AIModelEntity, ModelType, \
-    FetchFrom
+    FetchFrom, ModelPropertyKey
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
 from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub

@@ -97,7 +97,7 @@ class HuggingfaceHubLargeLanguageModel(_CommonHuggingfaceHub, LargeLanguageModel
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_type=ModelType.LLM,
 model_properties={
-    'mode': LLMMode.COMPLETION.value
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value
 },
 parameter_rules=self._get_customizable_model_parameter_rules()
 )

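Every Python hunk above makes the same substitution: the model_properties dict that used bare string keys ('mode', 'context_size') now uses the ModelPropertyKey enum, imported from core.model_runtime.entities.model_entities in the import hunk. A minimal sketch of the before/after pattern, assuming it runs inside the dify source tree and that LLMMode is importable from core.model_runtime.entities.llm_entities (that module path is an assumption, not shown in this diff):

# Sketch of the key substitution applied throughout this commit.
# Module paths are taken from the import hunk above, except the LLMMode
# location, which is assumed.
from core.model_runtime.entities.llm_entities import LLMMode
from core.model_runtime.entities.model_entities import ModelPropertyKey

# Before: plain string keys, easy to typo and opaque to static checks.
old_style = {
    'mode': LLMMode.CHAT.value,
    'context_size': 4096,
}

# After: the same properties keyed by ModelPropertyKey enum members,
# which is the form the entity definitions in this diff switch to.
new_style = {
    ModelPropertyKey.MODE: LLMMode.CHAT.value,
    ModelPropertyKey.CONTEXT_SIZE: 4096,
}

# Both dicts carry the same values; only the key type changes.
assert old_style['mode'] == new_style[ModelPropertyKey.MODE]
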
@@ -57,8 +57,8 @@ qdrant-client==1.6.4
 cohere~=4.32
 pyyaml~=6.0.1
 numpy~=1.25.2
-unstructured~=0.10.27
+unstructured[docx,pptx,msg,md,ppt]~=0.10.27
 bs4~=0.0.1
 markdown~=3.5.1
-google-generativeai~=0.3.2
+google-generativeai~=0.3.2
+httpx[socks]~=0.24.1

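The requirements hunk above is the fix named in the commit title: httpx can only route requests through a SOCKS proxy when the socksio backend is installed, which the [socks] extra pulls in. A minimal usage sketch, assuming httpx ~0.24.1 and a purely illustrative SOCKS5 proxy address:

# Sketch: making a request through a SOCKS5 proxy with httpx.
# Requires httpx installed with the socks extra (pip install "httpx[socks]~=0.24.1");
# without socksio, httpx raises an import error when given a socks5:// proxy URL.
import httpx

# Placeholder proxy address and target URL, for illustration only.
proxy_url = "socks5://localhost:1080"

with httpx.Client(proxies=proxy_url, timeout=10.0) as client:
    response = client.get("https://example.com")
    print(response.status_code)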