
fix: Enhance PR #787 #871

Draft
wants to merge 9 commits into
base: master
from
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
@@ -23,6 +23,7 @@
from .togetherai_config import TOGETHERAI_API_PARAMS, TogetherAIConfig
from .vllm_config import VLLM_API_PARAMS, VLLMConfig
from .zhipuai_config import ZHIPUAI_API_PARAMS, ZhipuAIConfig
from .internlm_config import INTERNLM_API_PARAMS, InternLMConfig

__all__ = [
'BaseConfig',
@@ -49,4 +50,6 @@
'SAMBA_API_PARAMS',
'TogetherAIConfig',
'TOGETHERAI_API_PARAMS',
'InternLMConfig',
'INTERNLM_API_PARAMS',
]
62 changes: 62 additions & 0 deletions camel/configs/internlm_config.py
@@ -0,0 +1,62 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

from __future__ import annotations

from typing import Any, Dict

import torch
from pydantic import Field

from camel.configs.base_config import BaseConfig


class InternLMConfig(BaseConfig):
    """Defines the parameters for generating chat completions using InternLM.

    Args:
        operation_mode (str, optional): Operation mode for InternLM; currently
            supports `"chat"`, `"write_webpage"`, `"resume_2_webpage"`, and
            `"write_article"`. (default: :obj:`"chat"`)
        device_map (str, optional): Device placement for the model. `"auto"`
            lets Hugging Face automatically select the best available device,
            whether GPU or CPU. (default: :obj:`"auto"`)
        max_tokens (int, optional): Maximum number of tokens to generate.
            (default: :obj:`4096`)
        model_config (dict, optional): Keyword arguments used when loading and
            running the InternLM model.
        tokenizer_config (dict, optional): Keyword arguments used when loading
            the InternLM tokenizer.
    """

    operation_mode: str = "chat"
    device_map: str = "auto"
    max_tokens: int = 4096

    model_config: Dict[str, Any] = Field(
        default={
            "torch_dtype": torch.bfloat16,
            "trust_remote_code": True,
            "do_sample": False,
            "num_beams": 3,
            "use_meta": True,
        }
    )
    tokenizer_config: Dict[str, Any] = Field(
        default={
            "trust_remote_code": True,
        }
    )

    def as_dict(self) -> Dict[str, Any]:
        return super().as_dict()


INTERNLM_API_PARAMS = {param for param in InternLMConfig.model_fields.keys()}
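
A quick usage sketch (not part of this diff): constructing the config defined above and dumping the keyword arguments it carries. Only `InternLMConfig`, its fields, and `as_dict()` come from the code shown here; the override values are illustrative.

from camel.configs import InternLMConfig

# Override a couple of defaults; device_map, model_config and tokenizer_config
# keep the default values defined above.
config = InternLMConfig(operation_mode="write_article", max_tokens=2048)

# as_dict() is inherited from BaseConfig and returns a plain dict that a
# backend can unpack when loading and running the model.
print(config.as_dict())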
13 changes: 6 additions & 7 deletions camel/messages/base.py
@@ -60,7 +60,6 @@ class BaseMessage:
role_type: RoleType
meta_dict: Optional[Dict[str, str]]
content: str

video_bytes: Optional[bytes] = None
image_list: Optional[List[Image.Image]] = None
image_detail: Literal["auto", "low", "high"] = "auto"
@@ -270,8 +269,8 @@ def to_openai_user_message(self) -> OpenAIUserMessage:
Returns:
OpenAIUserMessage: The converted :obj:`OpenAIUserMessage` object.
"""
-hybird_content: List[Any] = []
-hybird_content.append(
+hybrid_content: List[Any] = []
+hybrid_content.append(
{
"type": "text",
"text": self.content,
@@ -300,7 +299,7 @@ def to_openai_user_message(self) -> OpenAIUserMessage:
"utf-8"
)
image_prefix = f"data:image/{image_type};base64,"
-hybird_content.append(
+hybrid_content.append(
{
"type": "image_url",
"image_url": {
@@ -359,12 +358,12 @@ def to_openai_user_message(self) -> OpenAIUserMessage:
},
}

-hybird_content.append(item)
+hybrid_content.append(item)

-if len(hybird_content) > 1:
+if len(hybrid_content) > 1:
return {
"role": "user",
"content": hybird_content,
"content": hybrid_content,
}
# This return just for str message
else:
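
For orientation, here is an illustrative sketch (not part of this diff) of the message shape `to_openai_user_message()` builds when text and an image are both attached. The text, image type, and base64 payload are placeholder values, and the `detail` key is assumed from the `image_detail` field declared at the top of the diff.

hybrid_content = [
    {"type": "text", "text": "Describe this image."},
    {
        "type": "image_url",
        "image_url": {
            # base64-encoded image wrapped in a data URL (payload truncated)
            "url": "data:image/png;base64,iVBORw0KGg...",
            "detail": "auto",  # assumed to mirror self.image_detail
        },
    },
]

# With more than one content part, the method returns the hybrid form;
# a plain text-only message falls back to simple string content.
message = {"role": "user", "content": hybrid_content}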
2 changes: 2 additions & 0 deletions camel/models/__init__.py
@@ -16,6 +16,7 @@
from .base_model import BaseModelBackend
from .gemini_model import GeminiModel
from .groq_model import GroqModel
from .huggingface_model import HuggingFaceModel
from .litellm_model import LiteLLMModel
from .mistral_model import MistralModel
from .model_factory import ModelFactory
@@ -48,6 +49,7 @@
'OllamaModel',
'VLLMModel',
'GeminiModel',
'HuggingFaceModel',
'OpenAICompatibilityModel',
'SambaModel',
'TogetherAIModel',
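
The `HuggingFaceModel` backend itself is not shown in this excerpt, so its constructor signature is unknown here. As a rough orientation only, the kwargs carried by `InternLMConfig` map onto a plain Hugging Face load along these lines; the checkpoint name is an example, and options such as `use_meta`, `do_sample`, and `num_beams` would have to be routed by the backend rather than passed to `from_pretrained`.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Example checkpoint, not taken from the PR.
checkpoint = "internlm/internlm2_5-7b-chat"

# model_config-style kwargs accepted directly by from_pretrained.
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

# tokenizer_config-style kwargs.
tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)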