mirror of https://github.com/agentuniverse-ai/agentUniverse.git
synced 2026-02-09 01:59:19 +08:00

docs: add llm channel docs.
```diff
@@ -98,6 +98,7 @@ class LLM(ComponentBase):
     def as_langchain(self) -> BaseLanguageModel:
         """Convert to the langchain llm class."""
         self.init_channel()
         if self._channel_instance:
             return self._channel_instance.as_langchain()
         pass
@@ -139,7 +140,6 @@ class LLM(ComponentBase):
         self._max_context_length = component_configer.configer.value['max_context_length']
         if 'channel' in component_configer.configer.value:
             self.channel = component_configer.configer.value.get('channel')
             self.init_channel()
         return self

     def set_by_agent_model(self, **kwargs):
```

BIN docs/guidebook/_picture/llm_channel.jpg (new file, 1.6 MiB, binary file not shown)

docs/guidebook/en/In-Depth_Guides/Tutorials/LLM/LLM_Channel.md (new file, 211 lines)

## Model Channel Definition

In agentUniverse, model channels manage different access channels for the same model. By configuring the model channel component appropriately, you can easily switch between channels and smoothly call the model services provided by different service providers.

## Model Channel Call Flow

![llm_channel](../../../../_picture/llm_channel.jpg)

When agentUniverse starts, the LLM component initialization phase reads the channel name configured in the llm yaml and attaches the configured model channel to the corresponding model component.

When the model is called, if an associated channel exists, the corresponding channel's call method is invoked, the channel is wired into the call path, and the channel's execution result is returned.

### Specific Code

Code location: agentuniverse.llm.llm.LLM

```python
def init_channel(self):
    """
    Initialize the model channel method. When users configure the channel
    parameter in yaml, initialize the model channel and pass model parameters
    to the specified channel.
    """
    if self.channel and not self._channel_instance:
        llm_channel: LLMChannel = LLMChannelManager().get_instance_obj(
            component_instance_name=self.channel)
        if not llm_channel:
            return

        self._channel_instance = llm_channel

        llm_attrs = vars(self)
        channel_model_config = {}

        for attr_name, attr_value in llm_attrs.items():
            channel_model_config[attr_name] = attr_value

        llm_channel.channel_model_config = channel_model_config


def call(self, *args: Any, **kwargs: Any):
    try:
        self.init_channel()
        # If a channel instance exists, execute the channel's call method.
        if self._channel_instance:
            return self._channel_instance.call(*args, **kwargs)
        return self._call(*args, **kwargs)
    except Exception as e:
        LOGGER.error(f'Error in LLM call: {e}')
        raise e


async def acall(self, *args: Any, **kwargs: Any):
    try:
        self.init_channel()
        # If a channel instance exists, execute the channel's acall method.
        if self._channel_instance:
            return await self._channel_instance.acall(*args, **kwargs)
        return await self._acall(*args, **kwargs)
    except Exception as e:
        LOGGER.error(f'Error in LLM acall: {e}')
        raise e


def as_langchain(self) -> BaseLanguageModel:
    """Convert to the langchain llm class."""
    self.init_channel()
    # If a channel instance exists, execute the channel's langchain model conversion method.
    if self._channel_instance:
        return self._channel_instance.as_langchain()
    pass
```
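
A minimal usage sketch of the flow above. It assumes an LLM instance named `deepseek-reasoner` is registered (as in the yaml examples below), that `LLMManager` exposes the same `get_instance_obj` interface as `LLMChannelManager`, and that the model accepts a `messages` keyword:

```python
from agentuniverse.llm.llm import LLM
from agentuniverse.llm.llm_manager import LLMManager

# Fetch the registered LLM component by its instance name.
llm: LLM = LLMManager().get_instance_obj(component_instance_name='deepseek-reasoner')

# call() lazily runs init_channel(): if a channel is configured in the llm yaml,
# the request is routed to the channel instance; otherwise the model is called directly.
output = llm.call(messages=[{'role': 'user', 'content': 'Hello'}])
```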

## Model Channel Configurable Parameters

In agentUniverse, the model channel (LLMChannel) class inherits from ComponentBase and includes the following configurable parameters (a short sketch of how they interact follows the list):

1. `channel_name`: (Required) The aU channel component instance name.
2. `channel_api_key`: (Required) The key for the specific channel, e.g., a dashscope key for the Bailian platform, a qianfan key for the Qianfan platform.
3. `channel_api_base`: (Required) The endpoint of the specific channel.
4. `channel_organization`: (Optional) The organization for the specific channel.
5. `channel_proxy`: (Optional) The proxy for the specific channel.
6. `channel_model_name`: (Required) The model name on the specific channel. For example, deepseek-r1's model_name is deepseek-reasoner on the official site, but deepseek-r1 on the Bailian platform.
7. `channel_ext_info`: (Optional) Extended information for the specific channel.
8. `model_support_stream`: (Optional) Whether the current channel's model supports streaming. For example, if deepseek-r1 does not support streaming calls on a given channel, set this parameter to False to force the streaming switch off during channel calls.
9. `model_support_max_context_length`: (Optional) The maximum context length supported by the current channel's model. For example, if deepseek-r1's maximum context length on a given channel differs from the official model's, this parameter serves as a calibration value.
10. `model_is_openai_protocol_compatible`: (Optional) Whether the current channel's model supports the OpenAI protocol; defaults to True.
11. `_channel_model_config`: Inherits the model parameters configured in the llm yaml; they are assigned to the channel when the llm initializes it.
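
The exact merge logic lives in each channel implementation, but the intent implied by `init_channel()` and the parameter descriptions above can be sketched as follows (illustrative values only, not agentUniverse code):

```python
# The llm yaml attributes are copied into channel_model_config wholesale;
# channel-level settings then take precedence when the channel builds a request.
llm_yaml_attrs = {
    'model_name': 'deepseek-reasoner',   # name on the official site
    'streaming': True,
    'max_context_length': 65792,
}
channel_settings = {
    'channel_model_name': 'deepseek-r1',        # name on the Bailian platform
    'model_support_stream': False,              # this channel cannot stream
    'model_support_max_context_length': 65536,  # channel-side calibration
}

effective = dict(llm_yaml_attrs)
effective['model_name'] = channel_settings['channel_model_name']
if channel_settings['model_support_stream'] is False:
    effective['streaming'] = False  # streaming switch forced off for this channel
effective['max_context_length'] = channel_settings['model_support_max_context_length']
print(effective)
# {'model_name': 'deepseek-r1', 'streaming': False, 'max_context_length': 65536}
```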

## Specific Configuration Examples

### Official Channel

Each model in aU has an official model channel, such as deepseek-r1-official and qwen-max-official.

Here we use deepseek-r1 as an example to show its official channel configuration.

#### deepseek r1 llm yaml configuration

```yaml
name: 'deepseek-reasoner'
description: 'deepseek-reasoner'
model_name: 'deepseek-reasoner'
temperature: 0.5
max_tokens: 2000
max_context_length: 65792
streaming: True
channel: deepseek-r1-official # Configure the model channel as the official channel
meta_class: 'agentuniverse.llm.default.deep_seek_openai_style_llm.DefaultDeepSeekLLM'
```

#### deepseek r1 official channel yaml configuration

```yaml
channel_name: 'deepseek-r1-official'
channel_api_key: '${DEEPSEEK_OFFICIAL_CHANNEL_API_KEY}' # deepseek official site key
channel_api_base: 'https://api.deepseek.com/v1' # deepseek official site url
channel_model_name: 'deepseek-reasoner' # deepseek official site model name
model_support_stream: True # Whether the deepseek-r1 official channel supports streaming
model_support_max_context_length: 65792 # Maximum context length supported by the official channel model
model_is_openai_protocol_compatible: True
meta_class: 'agentuniverse.llm.llm_channel.deepseek_official_llm_channel.DeepseekOfficialLLMChannel'
```
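
The `${DEEPSEEK_OFFICIAL_CHANNEL_API_KEY}` placeholder is assumed to be resolved from the runtime environment; a minimal way to provide it (environment variable name taken from the yaml above) is:

```python
import os

# Set before agentUniverse starts, or export it in your shell / key config file.
os.environ['DEEPSEEK_OFFICIAL_CHANNEL_API_KEY'] = 'sk-...'  # placeholder, not a real key
```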

### Alibaba Cloud Bailian Channel

Using deepseek-r1 as an example, here is the specific configuration for the Bailian (dashscope) channel.

#### deepseek-r1 model yaml configuration:

```yaml
name: 'deepseek-reasoner'
description: 'deepseek-reasoner'
model_name: 'deepseek-reasoner'
temperature: 0.5
max_tokens: 2000
max_context_length: 65792
streaming: True
channel: deepseek-r1-dashscope # Configure the model channel as the dashscope channel
meta_class: 'agentuniverse.llm.default.deep_seek_openai_style_llm.DefaultDeepSeekLLM'
```

#### Bailian dashscope channel configuration:

```yaml
channel_name: 'deepseek-r1-dashscope'
channel_api_key: '${DASHSCOPE_CHANNEL_API_KEY}' # Alibaba Cloud Bailian platform key
channel_api_base: 'https://dashscope.aliyuncs.com/compatible-mode/v1' # Alibaba Cloud Bailian platform url
channel_model_name: deepseek-r1 # Alibaba Cloud Bailian platform model name
model_support_stream: True # Whether the Bailian platform model supports streaming
model_support_max_context_length: 65792 # Maximum context length supported by the Bailian platform model
model_is_openai_protocol_compatible: True
meta_class: 'agentuniverse.llm.llm_channel.dashscope_llm_channel.DashscopeLLMChannel'
```

Note that switching from the official channel to the Bailian channel only requires changing the `channel` field in the llm yaml; the model component itself stays untouched.

### Ollama Channel

Using deepseek-r1 as an example, here is the specific configuration for the Ollama channel.

#### deepseek-r1 model yaml configuration:

```yaml
name: 'deepseek-reasoner'
description: 'deepseek-reasoner'
model_name: 'deepseek-reasoner'
temperature: 0.5
max_tokens: 2000
max_context_length: 65792
streaming: True
channel: deepseek-r1-ollama # Configure the model channel as the ollama channel
meta_class: 'agentuniverse.llm.default.deep_seek_openai_style_llm.DefaultDeepSeekLLM'
```

#### Ollama channel configuration:

```yaml
channel_name: 'deepseek-r1-ollama'
channel_api_base: 'http://localhost:11434'
channel_model_name: 'deepseek-r1:671b' # deepseek-r1 model name on the ollama channel
model_support_stream: True # Whether deepseek-r1 supports streaming on the ollama channel
model_support_max_context_length: 65536 # Maximum context length supported by deepseek-r1 on the ollama channel
model_is_openai_protocol_compatible: False # Whether the ollama channel is compatible with the OpenAI protocol
meta_class: 'agentuniverse.llm.llm_channel.ollama_llm_channel.OllamaLLMChannel'
```

A locally deployed Ollama instance does not require an API key, which is why `channel_api_key` is omitted in this example.
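
With `model_is_openai_protocol_compatible: False`, the channel cannot reuse an OpenAI-style client and must speak Ollama's native REST API. A simplified sketch of such a request (not the actual channel implementation) against the endpoint and model name configured above:

```python
import requests

resp = requests.post(
    'http://localhost:11434/api/chat',  # channel_api_base + Ollama's native chat route
    json={
        'model': 'deepseek-r1:671b',    # channel_model_name
        'messages': [{'role': 'user', 'content': 'Hello'}],
        'stream': False,                # honour the channel's streaming setting
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()['message']['content'])
```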

## Built-in Model Channels in agentUniverse

### official_llm_channel

Each built-in model in agentUniverse has an official channel, such as:

[kimi_official_llm_channel](../../../../../../agentuniverse/llm/llm_channel/kimi_official_llm_channel.py)
[openai_official_llm_channel](../../../../../../agentuniverse/llm/llm_channel/openai_official_llm_channel.py)
[deepseek_official_llm_channel](../../../../../../agentuniverse/llm/llm_channel/deepseek_official_llm_channel.py)

### dashscope_llm_channel

Alibaba Cloud Bailian platform model channel: [dashscope_llm_channel](../../../../../../agentuniverse/llm/llm_channel/dashscope_llm_channel.py)

Note: the official channel for the qwen models is dashscope_llm_channel.

### qianfan_llm_channel

Baidu Cloud Qianfan platform model channel: [qianfan_llm_channel](../../../../../../agentuniverse/llm/llm_channel/qianfan_llm_channel.py)

### ollama_llm_channel

Ollama platform model channel: [ollama_llm_channel](../../../../../../agentuniverse/llm/llm_channel/ollama_llm_channel.py)

## Usage Recommendations

1. When configuring model channels, make sure all required parameters are set correctly, especially critical ones such as `channel_api_key` and `channel_api_base`.
2. `model_support_stream` and `model_support_max_context_length` may differ between channels, so configure them according to the actual capabilities of each channel. A small preflight check along these lines is sketched below.
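
A minimal preflight check (illustrative, not part of agentUniverse) that verifies the required channel parameters from the list above are present in a channel yaml file (the file path is hypothetical):

```python
import yaml  # PyYAML

REQUIRED = ('channel_name', 'channel_api_key', 'channel_api_base', 'channel_model_name')

def missing_channel_keys(path: str) -> list:
    """Return the required channel keys missing from a channel yaml file."""
    with open(path) as f:
        cfg = yaml.safe_load(f) or {}
    return [key for key in REQUIRED if not cfg.get(key)]

missing = missing_channel_keys('deepseek_r1_dashscope_channel.yaml')  # hypothetical path
if missing:
    raise ValueError(f'channel yaml is missing required keys: {missing}')
```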

docs/guidebook/zh/In-Depth_Guides/原理介绍/模型/模型通道.md (new file, 211 lines)

This file is the Chinese edition of the LLM_Channel document; its sections (model channel definition, call flow, specific code, configurable parameters, configuration examples, built-in channels, and usage recommendations) mirror the English document above line for line.