From 294f2cc3a98c7d3f8b401ef2941baf0e2f5524e9 Mon Sep 17 00:00:00 2001
From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Thu, 16 Jan 2025 10:23:46 -0300
Subject: [PATCH 1/6] Add @persist decorator with FlowPersistence interface
 (#1892)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Add @persist decorator with SQLite persistence

  - Add FlowPersistence abstract base class
  - Implement SQLiteFlowPersistence backend
  - Add @persist decorator for flow state persistence
  - Add tests for flow persistence functionality

  Co-Authored-By: Joe Moura

* Fix remaining merge conflicts in uv.lock

  - Remove stray merge conflict markers
  - Keep main's comprehensive platform-specific resolution markers
  - Preserve all required dependencies for persistence functionality

  Co-Authored-By: Joe Moura

* Fix final CUDA dependency conflicts in uv.lock

  - Resolve NVIDIA CUDA solver dependency conflicts
  - Use main's comprehensive platform checks
  - Ensure all merge conflict markers are removed
  - Preserve persistence-related dependencies

  Co-Authored-By: Joe Moura

* Fix nvidia-cusparse-cu12 dependency conflicts in uv.lock

  - Resolve NVIDIA CUSPARSE dependency conflicts
  - Use main's comprehensive platform checks
  - Complete systematic check of entire uv.lock file
  - Ensure all merge conflict markers are removed

  Co-Authored-By: Joe Moura

* Fix triton filelock dependency conflicts in uv.lock

  - Resolve triton package filelock dependency conflict
  - Use main's comprehensive platform checks
  - Complete final systematic check of entire uv.lock file
  - Ensure TOML file structure is valid

  Co-Authored-By: Joe Moura

* Fix merge conflict in crew_test.py

  - Remove duplicate assertion in test_multimodal_agent_live_image_analysis
  - Clean up conflict markers
  - Preserve test functionality

  Co-Authored-By: Joe Moura

* Clean up trailing merge conflict marker in crew_test.py

  - Remove remaining conflict marker at end of file
  - Preserve test functionality
  - Complete conflict resolution

  Co-Authored-By: Joe Moura

* Improve type safety in persistence implementation and resolve merge conflicts

  Co-Authored-By: Joe Moura

* fix: Add explicit type casting in _create_initial_state method

  Co-Authored-By: Joe Moura

* fix: Improve type safety in flow state handling with proper validation

  Co-Authored-By: Joe Moura

* fix: Improve type system with proper TypeVar scoping and validation

  Co-Authored-By: Joe Moura

* fix: Improve state restoration logic and add comprehensive tests

  Co-Authored-By: Joe Moura

* fix: Initialize FlowState instances without passing id to constructor

  Co-Authored-By: Joe Moura

* feat: Add class-level flow persistence decorator with SQLite default

  - Add class-level @persist decorator support
  - Set SQLiteFlowPersistence as default backend
  - Use db_storage_path for consistent database location
  - Improve async method handling and type safety
  - Add comprehensive docstrings and examples

  Co-Authored-By: Joe Moura

* fix: Sort imports in decorators.py to fix lint error

  Co-Authored-By: Joe Moura

* style: Organize imports according to PEP 8 standard

  Co-Authored-By: Joe Moura

* style: Format typing imports with line breaks for better readability

  Co-Authored-By: Joe Moura

* style: Simplify import organization to fix lint error

  Co-Authored-By: Joe Moura

* style: Fix import sorting using Ruff auto-fix

  Co-Authored-By: Joe Moura

---------

Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: Joe Moura
Co-authored-by: João
Moura --- src/crewai/flow/flow.py | 390 ++++++++++++++++---- src/crewai/flow/persistence/__init__.py | 18 + src/crewai/flow/persistence/base.py | 53 +++ src/crewai/flow/persistence/decorators.py | 177 +++++++++ src/crewai/flow/persistence/sqlite.py | 124 +++++++ src/crewai/utilities/paths.py | 10 +- tests/cassettes/test_agent_human_input.yaml | 188 +++++++--- tests/crew_test.py | 1 + tests/test_flow_persistence.py | 195 ++++++++++ uv.lock | 49 +-- 10 files changed, 1064 insertions(+), 141 deletions(-) create mode 100644 src/crewai/flow/persistence/__init__.py create mode 100644 src/crewai/flow/persistence/base.py create mode 100644 src/crewai/flow/persistence/decorators.py create mode 100644 src/crewai/flow/persistence/sqlite.py create mode 100644 tests/test_flow_persistence.py diff --git a/src/crewai/flow/flow.py b/src/crewai/flow/flow.py index f10626ce4f..ef688b9c1d 100644 --- a/src/crewai/flow/flow.py +++ b/src/crewai/flow/flow.py @@ -1,5 +1,6 @@ import asyncio import inspect +import uuid from typing import ( Any, Callable, @@ -12,6 +13,7 @@ TypeVar, Union, cast, + overload, ) from uuid import uuid4 @@ -25,6 +27,8 @@ MethodExecutionStartedEvent, ) from crewai.flow.flow_visualizer import plot_flow +from crewai.flow.persistence import FlowPersistence +from crewai.flow.persistence.base import FlowPersistence from crewai.flow.utils import get_possible_return_constants from crewai.telemetry import Telemetry @@ -33,7 +37,46 @@ class FlowState(BaseModel): """Base model for all flow states, ensuring each state has a unique ID.""" id: str = Field(default_factory=lambda: str(uuid4()), description="Unique identifier for the flow state") -T = TypeVar("T", bound=Union[FlowState, Dict[str, Any]]) +# Type variables with explicit bounds +T = TypeVar("T", bound=Union[Dict[str, Any], BaseModel]) # Generic flow state type parameter +StateT = TypeVar("StateT", bound=Union[Dict[str, Any], BaseModel]) # State validation type parameter + +def ensure_state_type(state: Any, expected_type: Type[StateT]) -> StateT: + """Ensure state matches expected type with proper validation. + + Args: + state: State instance to validate + expected_type: Expected type for the state + + Returns: + Validated state instance + + Raises: + TypeError: If state doesn't match expected type + ValueError: If state validation fails + """ + """Ensure state matches expected type with proper validation. 
+ + Args: + state: State instance to validate + expected_type: Expected type for the state + + Returns: + Validated state instance + + Raises: + TypeError: If state doesn't match expected type + ValueError: If state validation fails + """ + if expected_type == dict: + if not isinstance(state, dict): + raise TypeError(f"Expected dict, got {type(state).__name__}") + return cast(StateT, state) + if isinstance(expected_type, type) and issubclass(expected_type, BaseModel): + if not isinstance(state, expected_type): + raise TypeError(f"Expected {expected_type.__name__}, got {type(state).__name__}") + return cast(StateT, state) + raise TypeError(f"Invalid expected_type: {expected_type}") def start(condition: Optional[Union[str, dict, Callable]] = None) -> Callable: @@ -326,21 +369,27 @@ def __new__(mcs, name, bases, dct): routers = set() for attr_name, attr_value in dct.items(): - if hasattr(attr_value, "__is_start_method__"): - start_methods.append(attr_name) + # Check for any flow-related attributes + if (hasattr(attr_value, "__is_flow_method__") or + hasattr(attr_value, "__is_start_method__") or + hasattr(attr_value, "__trigger_methods__") or + hasattr(attr_value, "__is_router__")): + + # Register start methods + if hasattr(attr_value, "__is_start_method__"): + start_methods.append(attr_name) + + # Register listeners and routers if hasattr(attr_value, "__trigger_methods__"): methods = attr_value.__trigger_methods__ condition_type = getattr(attr_value, "__condition_type__", "OR") listeners[attr_name] = (condition_type, methods) - elif hasattr(attr_value, "__trigger_methods__"): - methods = attr_value.__trigger_methods__ - condition_type = getattr(attr_value, "__condition_type__", "OR") - listeners[attr_name] = (condition_type, methods) - if hasattr(attr_value, "__is_router__") and attr_value.__is_router__: - routers.add(attr_name) - possible_returns = get_possible_return_constants(attr_value) - if possible_returns: - router_paths[attr_name] = possible_returns + + if hasattr(attr_value, "__is_router__") and attr_value.__is_router__: + routers.add(attr_name) + possible_returns = get_possible_return_constants(attr_value) + if possible_returns: + router_paths[attr_name] = possible_returns setattr(cls, "_start_methods", start_methods) setattr(cls, "_listeners", listeners) @@ -351,6 +400,9 @@ def __new__(mcs, name, bases, dct): class Flow(Generic[T], metaclass=FlowMeta): + """Base class for all flows. + + Type parameter T must be either Dict[str, Any] or a subclass of BaseModel.""" _telemetry = Telemetry() _start_methods: List[str] = [] @@ -367,53 +419,220 @@ class _FlowGeneric(cls): # type: ignore _FlowGeneric.__name__ = f"{cls.__name__}[{item.__name__}]" return _FlowGeneric - def __init__(self) -> None: + def __init__( + self, + persistence: Optional[FlowPersistence] = None, + restore_uuid: Optional[str] = None, + **kwargs: Any, + ) -> None: + """Initialize a new Flow instance. 
+ + Args: + persistence: Optional persistence backend for storing flow states + restore_uuid: Optional UUID to restore state from persistence + **kwargs: Additional state values to initialize or override + """ + # Initialize basic instance attributes self._methods: Dict[str, Callable] = {} - self._state: T = self._create_initial_state() self._method_execution_counts: Dict[str, int] = {} self._pending_and_listeners: Dict[str, Set[str]] = {} self._method_outputs: List[Any] = [] # List to store all method outputs + self._persistence: Optional[FlowPersistence] = persistence + + # Validate state model before initialization + if isinstance(self.initial_state, type): + if issubclass(self.initial_state, BaseModel) and not issubclass(self.initial_state, FlowState): + # Check if model has id field + model_fields = getattr(self.initial_state, "model_fields", None) + if not model_fields or "id" not in model_fields: + raise ValueError("Flow state model must have an 'id' field") + + # Handle persistence and potential ID conflicts + stored_state = None + if self._persistence is not None: + if restore_uuid and kwargs and "id" in kwargs and restore_uuid != kwargs["id"]: + raise ValueError( + f"Conflicting IDs provided: restore_uuid='{restore_uuid}' " + f"vs kwargs['id']='{kwargs['id']}'. Use only one ID for restoration." + ) + + # Attempt to load state, prioritizing restore_uuid + if restore_uuid: + stored_state = self._persistence.load_state(restore_uuid) + if not stored_state: + raise ValueError(f"No state found for restore_uuid='{restore_uuid}'") + elif kwargs and "id" in kwargs: + stored_state = self._persistence.load_state(kwargs["id"]) + if not stored_state: + # For kwargs["id"], we allow creating new state if not found + self._state = self._create_initial_state() + if kwargs: + self._initialize_state(kwargs) + return + + # Initialize state based on persistence and kwargs + if stored_state: + # Create initial state and restore from persistence + self._state = self._create_initial_state() + self._restore_state(stored_state) + # Apply any additional kwargs to override specific fields + if kwargs: + filtered_kwargs = {k: v for k, v in kwargs.items() if k != "id"} + if filtered_kwargs: + self._initialize_state(filtered_kwargs) + else: + # No stored state, create new state with initial values + self._state = self._create_initial_state() + # Apply any additional kwargs + if kwargs: + self._initialize_state(kwargs) self._telemetry.flow_creation_span(self.__class__.__name__) + # Register all flow-related methods for method_name in dir(self): - if callable(getattr(self, method_name)) and not method_name.startswith( - "__" - ): - self._methods[method_name] = getattr(self, method_name) - + if not method_name.startswith("_"): + method = getattr(self, method_name) + # Check for any flow-related attributes + if (hasattr(method, "__is_flow_method__") or + hasattr(method, "__is_start_method__") or + hasattr(method, "__trigger_methods__") or + hasattr(method, "__is_router__")): + # Ensure method is bound to this instance + if not hasattr(method, "__self__"): + method = method.__get__(self, self.__class__) + self._methods[method_name] = method + + + def _create_initial_state(self) -> T: + """Create and initialize flow state with UUID and default values. 
+ + Returns: + New state instance with UUID and default values initialized + + Raises: + ValueError: If structured state model lacks 'id' field + TypeError: If state is neither BaseModel nor dictionary + """ # Handle case where initial_state is None but we have a type parameter if self.initial_state is None and hasattr(self, "_initial_state_T"): state_type = getattr(self, "_initial_state_T") if isinstance(state_type, type): if issubclass(state_type, FlowState): - return state_type() # type: ignore + # Create instance without id, then set it + instance = state_type() + if not hasattr(instance, 'id'): + setattr(instance, 'id', str(uuid4())) + return cast(T, instance) elif issubclass(state_type, BaseModel): # Create a new type that includes the ID field class StateWithId(state_type, FlowState): # type: ignore pass - return StateWithId() # type: ignore - + instance = StateWithId() + if not hasattr(instance, 'id'): + setattr(instance, 'id', str(uuid4())) + return cast(T, instance) + elif state_type == dict: + return cast(T, {"id": str(uuid4())}) # Minimal dict state + # Handle case where no initial state is provided if self.initial_state is None: - return {"id": str(uuid4())} # type: ignore - + return cast(T, {"id": str(uuid4())}) + # Handle case where initial_state is a type (class) if isinstance(self.initial_state, type): if issubclass(self.initial_state, FlowState): - return self.initial_state() # type: ignore + return cast(T, self.initial_state()) # Uses model defaults elif issubclass(self.initial_state, BaseModel): - # Create a new type that includes the ID field - class StateWithId(self.initial_state, FlowState): # type: ignore - pass - return StateWithId() # type: ignore + # Validate that the model has an id field + model_fields = getattr(self.initial_state, "model_fields", None) + if not model_fields or "id" not in model_fields: + raise ValueError("Flow state model must have an 'id' field") + return cast(T, self.initial_state()) # Uses model defaults + elif self.initial_state == dict: + return cast(T, {"id": str(uuid4())}) + + # Handle dictionary instance case + if isinstance(self.initial_state, dict): + new_state = dict(self.initial_state) # Copy to avoid mutations + if "id" not in new_state: + new_state["id"] = str(uuid4()) + return cast(T, new_state) + + # Handle BaseModel instance case + if isinstance(self.initial_state, BaseModel): + model = cast(BaseModel, self.initial_state) + if not hasattr(model, "id"): + raise ValueError("Flow state model must have an 'id' field") + + # Create new instance with same values to avoid mutations + if hasattr(model, "model_dump"): + # Pydantic v2 + state_dict = model.model_dump() + elif hasattr(model, "dict"): + # Pydantic v1 + state_dict = model.dict() + else: + # Fallback for other BaseModel implementations + state_dict = { + k: v for k, v in model.__dict__.items() + if not k.startswith("_") + } + + # Create new instance of the same class + model_class = type(model) + return cast(T, model_class(**state_dict)) + + raise TypeError( + f"Initial state must be dict or BaseModel, got {type(self.initial_state)}" + ) + # Handle case where initial_state is None but we have a type parameter + if self.initial_state is None and hasattr(self, "_initial_state_T"): + state_type = getattr(self, "_initial_state_T") + if isinstance(state_type, type): + if issubclass(state_type, FlowState): + return cast(T, state_type()) + elif issubclass(state_type, BaseModel): + # Create a new type that includes the ID field + class StateWithId(state_type, FlowState): # type: 
ignore + pass + return cast(T, StateWithId()) + elif state_type == dict: + return cast(T, {"id": str(uuid4())}) - # Handle dictionary case - if isinstance(self.initial_state, dict) and "id" not in self.initial_state: - self.initial_state["id"] = str(uuid4()) + # Handle case where no initial state is provided + if self.initial_state is None: + return cast(T, {"id": str(uuid4())}) - return self.initial_state # type: ignore + # Handle case where initial_state is a type (class) + if isinstance(self.initial_state, type): + if issubclass(self.initial_state, FlowState): + return cast(T, self.initial_state()) + elif issubclass(self.initial_state, BaseModel): + # Validate that the model has an id field + model_fields = getattr(self.initial_state, "model_fields", None) + if not model_fields or "id" not in model_fields: + raise ValueError("Flow state model must have an 'id' field") + return cast(T, self.initial_state()) + elif self.initial_state == dict: + return cast(T, {"id": str(uuid4())}) + + # Handle dictionary instance case + if isinstance(self.initial_state, dict): + if "id" not in self.initial_state: + self.initial_state["id"] = str(uuid4()) + return cast(T, dict(self.initial_state)) # Create new dict to avoid mutations + + # Handle BaseModel instance case + if isinstance(self.initial_state, BaseModel): + if not hasattr(self.initial_state, "id"): + raise ValueError("Flow state model must have an 'id' field") + return cast(T, self.initial_state) + + raise TypeError( + f"Initial state must be dict or BaseModel, got {type(self.initial_state)}" + ) @property def state(self) -> T: @@ -425,50 +644,95 @@ def method_outputs(self) -> List[Any]: return self._method_outputs def _initialize_state(self, inputs: Dict[str, Any]) -> None: + """Initialize or update flow state with new inputs. 
+ + Args: + inputs: Dictionary of state values to set/update + + Raises: + ValueError: If validation fails for structured state + TypeError: If state is neither BaseModel nor dictionary + """ if isinstance(self._state, dict): - # Preserve the ID when updating unstructured state + # For dict states, preserve existing fields unless overridden current_id = self._state.get("id") - self._state.update(inputs) + # Only update specified fields + for k, v in inputs.items(): + self._state[k] = v + # Ensure ID is preserved or generated if current_id: self._state["id"] = current_id elif "id" not in self._state: self._state["id"] = str(uuid4()) elif isinstance(self._state, BaseModel): - # Structured state + # For BaseModel states, preserve existing fields unless overridden try: - def create_model_with_extra_forbid( - base_model: Type[BaseModel], - ) -> Type[BaseModel]: - class ModelWithExtraForbid(base_model): # type: ignore - model_config = base_model.model_config.copy() - model_config["extra"] = "forbid" - - return ModelWithExtraForbid - - # Get current state as dict, preserving the ID if it exists - state_model = cast(BaseModel, self._state) - current_state = ( - state_model.model_dump() - if hasattr(state_model, "model_dump") - else state_model.dict() - if hasattr(state_model, "dict") - else { - k: v - for k, v in state_model.__dict__.items() + model = cast(BaseModel, self._state) + # Get current state as dict + if hasattr(model, "model_dump"): + current_state = model.model_dump() + elif hasattr(model, "dict"): + current_state = model.dict() + else: + current_state = { + k: v for k, v in model.__dict__.items() if not k.startswith("_") } - ) - - ModelWithExtraForbid = create_model_with_extra_forbid( - self._state.__class__ - ) - self._state = cast( - T, ModelWithExtraForbid(**{**current_state, **inputs}) - ) + + # Create new state with preserved fields and updates + new_state = {**current_state, **inputs} + + # Create new instance with merged state + model_class = type(model) + if hasattr(model_class, "model_validate"): + # Pydantic v2 + self._state = cast(T, model_class.model_validate(new_state)) + elif hasattr(model_class, "parse_obj"): + # Pydantic v1 + self._state = cast(T, model_class.parse_obj(new_state)) + else: + # Fallback for other BaseModel implementations + self._state = cast(T, model_class(**new_state)) except ValidationError as e: raise ValueError(f"Invalid inputs for structured state: {e}") from e else: raise TypeError("State must be a BaseModel instance or a dictionary.") + + def _restore_state(self, stored_state: Dict[str, Any]) -> None: + """Restore flow state from persistence. 
+ + Args: + stored_state: Previously stored state to restore + + Raises: + ValueError: If validation fails for structured state + TypeError: If state is neither BaseModel nor dictionary + """ + # When restoring from persistence, use the stored ID + stored_id = stored_state.get("id") + if not stored_id: + raise ValueError("Stored state must have an 'id' field") + + if isinstance(self._state, dict): + # For dict states, update all fields from stored state + self._state.clear() + self._state.update(stored_state) + elif isinstance(self._state, BaseModel): + # For BaseModel states, create new instance with stored values + model = cast(BaseModel, self._state) + if hasattr(model, "model_validate"): + # Pydantic v2 + self._state = cast(T, type(model).model_validate(stored_state)) + elif hasattr(model, "parse_obj"): + # Pydantic v1 + self._state = cast(T, type(model).parse_obj(stored_state)) + else: + # Fallback for other BaseModel implementations + self._state = cast(T, type(model)(**stored_state)) + else: + raise TypeError( + f"State must be dict or BaseModel, got {type(self._state)}" + ) def kickoff(self, inputs: Optional[Dict[str, Any]] = None) -> Any: self.event_emitter.send( diff --git a/src/crewai/flow/persistence/__init__.py b/src/crewai/flow/persistence/__init__.py new file mode 100644 index 0000000000..0b673f6bf4 --- /dev/null +++ b/src/crewai/flow/persistence/__init__.py @@ -0,0 +1,18 @@ +""" +CrewAI Flow Persistence. + +This module provides interfaces and implementations for persisting flow states. +""" + +from typing import Any, Dict, TypeVar, Union + +from pydantic import BaseModel + +from crewai.flow.persistence.base import FlowPersistence +from crewai.flow.persistence.decorators import persist +from crewai.flow.persistence.sqlite import SQLiteFlowPersistence + +__all__ = ["FlowPersistence", "persist", "SQLiteFlowPersistence"] + +StateType = TypeVar('StateType', bound=Union[Dict[str, Any], BaseModel]) +DictStateType = Dict[str, Any] diff --git a/src/crewai/flow/persistence/base.py b/src/crewai/flow/persistence/base.py new file mode 100644 index 0000000000..c926f6f348 --- /dev/null +++ b/src/crewai/flow/persistence/base.py @@ -0,0 +1,53 @@ +"""Base class for flow state persistence.""" + +import abc +from typing import Any, Dict, Optional, Union + +from pydantic import BaseModel + + +class FlowPersistence(abc.ABC): + """Abstract base class for flow state persistence. + + This class defines the interface that all persistence implementations must follow. + It supports both structured (Pydantic BaseModel) and unstructured (dict) states. + """ + + @abc.abstractmethod + def init_db(self) -> None: + """Initialize the persistence backend. + + This method should handle any necessary setup, such as: + - Creating tables + - Establishing connections + - Setting up indexes + """ + pass + + @abc.abstractmethod + def save_state( + self, + flow_uuid: str, + method_name: str, + state_data: Union[Dict[str, Any], BaseModel] + ) -> None: + """Persist the flow state after method completion. + + Args: + flow_uuid: Unique identifier for the flow instance + method_name: Name of the method that just completed + state_data: Current state data (either dict or Pydantic model) + """ + pass + + @abc.abstractmethod + def load_state(self, flow_uuid: str) -> Optional[Dict[str, Any]]: + """Load the most recent state for a given flow UUID. 
+ + Args: + flow_uuid: Unique identifier for the flow instance + + Returns: + The most recent state as a dictionary, or None if no state exists + """ + pass diff --git a/src/crewai/flow/persistence/decorators.py b/src/crewai/flow/persistence/decorators.py new file mode 100644 index 0000000000..4906e95d56 --- /dev/null +++ b/src/crewai/flow/persistence/decorators.py @@ -0,0 +1,177 @@ +""" +Decorators for flow state persistence. + +Example: + ```python + from crewai.flow.flow import Flow, start + from crewai.flow.persistence import persist, SQLiteFlowPersistence + + class MyFlow(Flow): + @start() + @persist(SQLiteFlowPersistence()) + def sync_method(self): + # Synchronous method implementation + pass + + @start() + @persist(SQLiteFlowPersistence()) + async def async_method(self): + # Asynchronous method implementation + await some_async_operation() + ``` +""" + +import asyncio +import functools +import inspect +import logging +from typing import ( + Any, + Callable, + Dict, + Optional, + Type, + TypeVar, + Union, + cast, + get_type_hints, +) + +from pydantic import BaseModel + +from crewai.flow.persistence.base import FlowPersistence +from crewai.flow.persistence.sqlite import SQLiteFlowPersistence + +logger = logging.getLogger(__name__) +T = TypeVar("T") + + +def persist(persistence: Optional[FlowPersistence] = None): + """Decorator to persist flow state. + + This decorator can be applied at either the class level or method level. + When applied at the class level, it automatically persists all flow method + states. When applied at the method level, it persists only that method's + state. + + Args: + persistence: Optional FlowPersistence implementation to use. + If not provided, uses SQLiteFlowPersistence. + + Returns: + A decorator that can be applied to either a class or method + + Raises: + ValueError: If the flow state doesn't have an 'id' field + RuntimeError: If state persistence fails + + Example: + @persist # Class-level persistence with default SQLite + class MyFlow(Flow[MyState]): + @start() + def begin(self): + pass + """ + def _persist_state(flow_instance: Any, method_name: str, persistence_instance: FlowPersistence) -> None: + """Helper to persist state with error handling.""" + try: + # Get flow UUID from state + state = getattr(flow_instance, 'state', None) + if state is None: + raise ValueError("Flow instance has no state") + + flow_uuid: Optional[str] = None + if isinstance(state, dict): + flow_uuid = state.get('id') + elif isinstance(state, BaseModel): + flow_uuid = getattr(state, 'id', None) + + if not flow_uuid: + raise ValueError( + "Flow state must have an 'id' field for persistence" + ) + + # Persist the state + persistence_instance.save_state( + flow_uuid=flow_uuid, + method_name=method_name, + state_data=state, + ) + except Exception as e: + logger.error( + f"Failed to persist state for method {method_name}: {str(e)}" + ) + raise RuntimeError(f"State persistence failed: {str(e)}") from e + + def decorator(target: Union[Type, Callable[..., T]]) -> Union[Type, Callable[..., T]]: + """Decorator that handles both class and method decoration.""" + actual_persistence = persistence or SQLiteFlowPersistence() + + if isinstance(target, type): + # Class decoration + class_methods = {} + for name, method in target.__dict__.items(): + if callable(method) and hasattr(method, "__is_flow_method__"): + # Wrap each flow method with persistence + if asyncio.iscoroutinefunction(method): + @functools.wraps(method) + async def class_async_wrapper(self: Any, *args: Any, **kwargs: Any) -> 
Any: + method_coro = method(self, *args, **kwargs) + if asyncio.iscoroutine(method_coro): + result = await method_coro + else: + result = method_coro + _persist_state(self, method.__name__, actual_persistence) + return result + class_methods[name] = class_async_wrapper + else: + @functools.wraps(method) + def class_sync_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: + result = method(self, *args, **kwargs) + _persist_state(self, method.__name__, actual_persistence) + return result + class_methods[name] = class_sync_wrapper + + # Preserve flow-specific attributes + for attr in ["__is_start_method__", "__trigger_methods__", "__condition_type__", "__is_router__"]: + if hasattr(method, attr): + setattr(class_methods[name], attr, getattr(method, attr)) + setattr(class_methods[name], "__is_flow_method__", True) + + # Update class with wrapped methods + for name, method in class_methods.items(): + setattr(target, name, method) + return target + else: + # Method decoration + method = target + setattr(method, "__is_flow_method__", True) + + if asyncio.iscoroutinefunction(method): + @functools.wraps(method) + async def method_async_wrapper(flow_instance: Any, *args: Any, **kwargs: Any) -> T: + method_coro = method(flow_instance, *args, **kwargs) + if asyncio.iscoroutine(method_coro): + result = await method_coro + else: + result = method_coro + _persist_state(flow_instance, method.__name__, actual_persistence) + return result + for attr in ["__is_start_method__", "__trigger_methods__", "__condition_type__", "__is_router__"]: + if hasattr(method, attr): + setattr(method_async_wrapper, attr, getattr(method, attr)) + setattr(method_async_wrapper, "__is_flow_method__", True) + return cast(Callable[..., T], method_async_wrapper) + else: + @functools.wraps(method) + def method_sync_wrapper(flow_instance: Any, *args: Any, **kwargs: Any) -> T: + result = method(flow_instance, *args, **kwargs) + _persist_state(flow_instance, method.__name__, actual_persistence) + return result + for attr in ["__is_start_method__", "__trigger_methods__", "__condition_type__", "__is_router__"]: + if hasattr(method, attr): + setattr(method_sync_wrapper, attr, getattr(method, attr)) + setattr(method_sync_wrapper, "__is_flow_method__", True) + return cast(Callable[..., T], method_sync_wrapper) + + return decorator diff --git a/src/crewai/flow/persistence/sqlite.py b/src/crewai/flow/persistence/sqlite.py new file mode 100644 index 0000000000..bdd091b2b3 --- /dev/null +++ b/src/crewai/flow/persistence/sqlite.py @@ -0,0 +1,124 @@ +""" +SQLite-based implementation of flow state persistence. +""" + +import json +import os +import sqlite3 +import tempfile +from datetime import datetime +from typing import Any, Dict, Optional, Union + +from pydantic import BaseModel + +from crewai.flow.persistence.base import FlowPersistence + + +class SQLiteFlowPersistence(FlowPersistence): + """SQLite-based implementation of flow state persistence. + + This class provides a simple, file-based persistence implementation using SQLite. + It's suitable for development and testing, or for production use cases with + moderate performance requirements. + """ + + db_path: str # Type annotation for instance variable + + def __init__(self, db_path: Optional[str] = None): + """Initialize SQLite persistence. + + Args: + db_path: Path to the SQLite database file. If not provided, uses + db_storage_path() from utilities.paths. 
+ + Raises: + ValueError: If db_path is invalid + """ + from crewai.utilities.paths import db_storage_path + # Get path from argument or default location + path = db_path or db_storage_path() + + if not path: + raise ValueError("Database path must be provided") + + self.db_path = path # Now mypy knows this is str + self.init_db() + + def init_db(self) -> None: + """Create the necessary tables if they don't exist.""" + with sqlite3.connect(self.db_path) as conn: + conn.execute(""" + CREATE TABLE IF NOT EXISTS flow_states ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + flow_uuid TEXT NOT NULL, + method_name TEXT NOT NULL, + timestamp DATETIME NOT NULL, + state_json TEXT NOT NULL + ) + """) + # Add index for faster UUID lookups + conn.execute(""" + CREATE INDEX IF NOT EXISTS idx_flow_states_uuid + ON flow_states(flow_uuid) + """) + + def save_state( + self, + flow_uuid: str, + method_name: str, + state_data: Union[Dict[str, Any], BaseModel], + ) -> None: + """Save the current flow state to SQLite. + + Args: + flow_uuid: Unique identifier for the flow instance + method_name: Name of the method that just completed + state_data: Current state data (either dict or Pydantic model) + """ + # Convert state_data to dict, handling both Pydantic and dict cases + if isinstance(state_data, BaseModel): + state_dict = dict(state_data) # Use dict() for better type compatibility + elif isinstance(state_data, dict): + state_dict = state_data + else: + raise ValueError( + f"state_data must be either a Pydantic BaseModel or dict, got {type(state_data)}" + ) + + with sqlite3.connect(self.db_path) as conn: + conn.execute(""" + INSERT INTO flow_states ( + flow_uuid, + method_name, + timestamp, + state_json + ) VALUES (?, ?, ?, ?) + """, ( + flow_uuid, + method_name, + datetime.utcnow().isoformat(), + json.dumps(state_dict), + )) + + def load_state(self, flow_uuid: str) -> Optional[Dict[str, Any]]: + """Load the most recent state for a given flow UUID. + + Args: + flow_uuid: Unique identifier for the flow instance + + Returns: + The most recent state as a dictionary, or None if no state exists + """ + with sqlite3.connect(self.db_path) as conn: + cursor = conn.execute(""" + SELECT state_json + FROM flow_states + WHERE flow_uuid = ? + ORDER BY id DESC + LIMIT 1 + """, (flow_uuid,)) + row = cursor.fetchone() + + if row: + return json.loads(row[0]) + return None diff --git a/src/crewai/utilities/paths.py b/src/crewai/utilities/paths.py index 9bf167ee6c..5d91d1719e 100644 --- a/src/crewai/utilities/paths.py +++ b/src/crewai/utilities/paths.py @@ -5,14 +5,18 @@ """Path management utilities for CrewAI storage and configuration.""" -def db_storage_path(): - """Returns the path for database storage.""" +def db_storage_path() -> str: + """Returns the path for SQLite database storage. 
+ + Returns: + str: Full path to the SQLite database file + """ app_name = get_project_directory_name() app_author = "CrewAI" data_dir = Path(appdirs.user_data_dir(app_name, app_author)) data_dir.mkdir(parents=True, exist_ok=True) - return data_dir + return str(data_dir / "crewai_flows.db") def get_project_directory_name(): diff --git a/tests/cassettes/test_agent_human_input.yaml b/tests/cassettes/test_agent_human_input.yaml index 16b9ac9a53..8c5fd3a80b 100644 --- a/tests/cassettes/test_agent_human_input.yaml +++ b/tests/cassettes/test_agent_human_input.yaml @@ -1,4 +1,87 @@ interactions: +- request: + body: !!binary | + CqcXCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkS/hYKEgoQY3Jld2FpLnRl + bGVtZXRyeRJ5ChBuJJtOdNaB05mOW/p3915eEgj2tkAd3rZcASoQVG9vbCBVc2FnZSBFcnJvcjAB + OYa7/URvKBUYQUpcFEVvKBUYShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuODYuMEoPCgNsbG0SCAoG + Z3B0LTRvegIYAYUBAAEAABLJBwoQifhX01E5i+5laGdALAlZBBIIBuGM1aN+OPgqDENyZXcgQ3Jl + YXRlZDABORVGruBvKBUYQaipwOBvKBUYShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuODYuMEoaCg5w + eXRob25fdmVyc2lvbhIICgYzLjEyLjdKLgoIY3Jld19rZXkSIgogN2U2NjA4OTg5ODU5YTY3ZWVj + ODhlZWY3ZmNlODUyMjVKMQoHY3Jld19pZBImCiRiOThiNWEwMC01YTI1LTQxMDctYjQwNS1hYmYz + MjBhOGYzYThKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKEQoLY3Jld19tZW1vcnkSAhAA + ShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgB + SuQCCgtjcmV3X2FnZW50cxLUAgrRAlt7ImtleSI6ICIyMmFjZDYxMWU0NGVmNWZhYzA1YjUzM2Q3 + NWU4ODkzYiIsICJpZCI6ICJkNWIyMzM1YS0yMmIyLTQyZWEtYmYwNS03OTc3NmU3MmYzOTIiLCAi + cm9sZSI6ICJEYXRhIFNjaWVudGlzdCIsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAy + MCwgIm1heF9ycG0iOiBudWxsLCAiZnVuY3Rpb25fY2FsbGluZ19sbG0iOiAiIiwgImxsbSI6ICJn + cHQtNG8tbWluaSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJhbGxvd19jb2RlX2V4 + ZWN1dGlvbj8iOiBmYWxzZSwgIm1heF9yZXRyeV9saW1pdCI6IDIsICJ0b29sc19uYW1lcyI6IFsi + Z2V0IGdyZWV0aW5ncyJdfV1KkgIKCmNyZXdfdGFza3MSgwIKgAJbeyJrZXkiOiAiYTI3N2IzNGIy + YzE0NmYwYzU2YzVlMTM1NmU4ZjhhNTciLCAiaWQiOiAiMjJiZWMyMzEtY2QyMS00YzU4LTgyN2Ut + MDU4MWE4ZjBjMTExIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6 + IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJEYXRhIFNjaWVudGlzdCIsICJhZ2VudF9rZXkiOiAiMjJh + Y2Q2MTFlNDRlZjVmYWMwNWI1MzNkNzVlODg5M2IiLCAidG9vbHNfbmFtZXMiOiBbImdldCBncmVl + dGluZ3MiXX1degIYAYUBAAEAABKOAgoQ5WYoxRtTyPjge4BduhL0rRIIv2U6rvWALfwqDFRhc2sg + Q3JlYXRlZDABOX068uBvKBUYQZkv8+BvKBUYSi4KCGNyZXdfa2V5EiIKIDdlNjYwODk4OTg1OWE2 + N2VlYzg4ZWVmN2ZjZTg1MjI1SjEKB2NyZXdfaWQSJgokYjk4YjVhMDAtNWEyNS00MTA3LWI0MDUt + YWJmMzIwYThmM2E4Si4KCHRhc2tfa2V5EiIKIGEyNzdiMzRiMmMxNDZmMGM1NmM1ZTEzNTZlOGY4 + YTU3SjEKB3Rhc2tfaWQSJgokMjJiZWMyMzEtY2QyMS00YzU4LTgyN2UtMDU4MWE4ZjBjMTExegIY + AYUBAAEAABKQAQoQXyeDtJDFnyp2Fjk9YEGTpxIIaNE7gbhPNYcqClRvb2wgVXNhZ2UwATkaXTvj + bygVGEGvx0rjbygVGEoaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjg2LjBKHAoJdG9vbF9uYW1lEg8K + DUdldCBHcmVldGluZ3NKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABLVBwoQMWfznt0qwauEzl7T + UOQxRBII9q+pUS5EdLAqDENyZXcgQ3JlYXRlZDABORONPORvKBUYQSAoS+RvKBUYShoKDmNyZXdh + aV92ZXJzaW9uEggKBjAuODYuMEoaCg5weXRob25fdmVyc2lvbhIICgYzLjEyLjdKLgoIY3Jld19r + ZXkSIgogYzMwNzYwMDkzMjY3NjE0NDRkNTdjNzFkMWRhM2YyN2NKMQoHY3Jld19pZBImCiQ3OTQw + MTkyNS1iOGU5LTQ3MDgtODUzMC00NDhhZmEzYmY4YjBKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVl + bnRpYWxKEQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVj + cmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSuoCCgtjcmV3X2FnZW50cxLaAgrXAlt7ImtleSI6ICI5 + OGYzYjFkNDdjZTk2OWNmMDU3NzI3Yjc4NDE0MjVjZCIsICJpZCI6ICI5OTJkZjYyZi1kY2FiLTQy + OTUtOTIwNi05MDBkNDExNGIxZTkiLCAicm9sZSI6ICJGcmllbmRseSBOZWlnaGJvciIsICJ2ZXJi + b3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAyMCwgIm1heF9ycG0iOiBudWxsLCAiZnVuY3Rpb25f + 
Y2FsbGluZ19sbG0iOiAiIiwgImxsbSI6ICJncHQtNG8tbWluaSIsICJkZWxlZ2F0aW9uX2VuYWJs + ZWQ/IjogZmFsc2UsICJhbGxvd19jb2RlX2V4ZWN1dGlvbj8iOiBmYWxzZSwgIm1heF9yZXRyeV9s + aW1pdCI6IDIsICJ0b29sc19uYW1lcyI6IFsiZGVjaWRlIGdyZWV0aW5ncyJdfV1KmAIKCmNyZXdf + dGFza3MSiQIKhgJbeyJrZXkiOiAiODBkN2JjZDQ5MDk5MjkwMDgzODMyZjBlOTgzMzgwZGYiLCAi + aWQiOiAiMmZmNjE5N2UtYmEyNy00YjczLWI0YTctNGZhMDQ4ZTYyYjQ3IiwgImFzeW5jX2V4ZWN1 + dGlvbj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJGcmll + bmRseSBOZWlnaGJvciIsICJhZ2VudF9rZXkiOiAiOThmM2IxZDQ3Y2U5NjljZjA1NzcyN2I3ODQx + NDI1Y2QiLCAidG9vbHNfbmFtZXMiOiBbImRlY2lkZSBncmVldGluZ3MiXX1degIYAYUBAAEAABKO + AgoQnjTp5boK7/+DQxztYIpqihIIgGnMUkBtzHEqDFRhc2sgQ3JlYXRlZDABOcpYcuRvKBUYQalE + c+RvKBUYSi4KCGNyZXdfa2V5EiIKIGMzMDc2MDA5MzI2NzYxNDQ0ZDU3YzcxZDFkYTNmMjdjSjEK + B2NyZXdfaWQSJgokNzk0MDE5MjUtYjhlOS00NzA4LTg1MzAtNDQ4YWZhM2JmOGIwSi4KCHRhc2tf + a2V5EiIKIDgwZDdiY2Q0OTA5OTI5MDA4MzgzMmYwZTk4MzM4MGRmSjEKB3Rhc2tfaWQSJgokMmZm + NjE5N2UtYmEyNy00YjczLWI0YTctNGZhMDQ4ZTYyYjQ3egIYAYUBAAEAABKTAQoQ26H9pLUgswDN + p9XhJwwL6BIIx3bw7mAvPYwqClRvb2wgVXNhZ2UwATmy7NPlbygVGEEvb+HlbygVGEoaCg5jcmV3 + YWlfdmVyc2lvbhIICgYwLjg2LjBKHwoJdG9vbF9uYW1lEhIKEERlY2lkZSBHcmVldGluZ3NKDgoI + YXR0ZW1wdHMSAhgBegIYAYUBAAEAAA== + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2986' + Content-Type: + - application/x-protobuf + User-Agent: + - OTel-OTLP-Exporter-Python/1.27.0 + method: POST + uri: https://telemetry.crewai.com:4319/v1/traces + response: + body: + string: "\n\0" + headers: + Content-Length: + - '2' + Content-Type: + - application/x-protobuf + Date: + - Fri, 27 Dec 2024 22:14:53 GMT + status: + code: 200 + message: OK - request: body: '{"messages": [{"role": "system", "content": "You are test role. 
test backstory\nYour personal goal is: test goal\nTo give my best complete final answer to the task @@ -22,18 +105,20 @@ interactions: - '824' content-type: - application/json + cookie: + - _cfuvid=ePJSDFdHag2D8lj21_ijAMWjoA6xfnPNxN4uekvC728-1727226247743-0.0.1.1-604800000 host: - api.openai.com user-agent: - OpenAI/Python 1.52.1 x-stainless-arch: - - arm64 + - x64 x-stainless-async: - 'false' x-stainless-lang: - python x-stainless-os: - - MacOS + - Linux x-stainless-package-version: - 1.52.1 x-stainless-raw-response: @@ -47,8 +132,8 @@ interactions: method: POST uri: https://api.openai.com/v1/chat/completions response: - content: "{\n \"id\": \"chatcmpl-AaqIIsTxhvf75xvuu7gQScIlRSKbW\",\n \"object\": - \"chat.completion\",\n \"created\": 1733344190,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + content: "{\n \"id\": \"chatcmpl-AjCtZLLrWi8ZASpP9bz6HaCV7xBIn\",\n \"object\": + \"chat.completion\",\n \"created\": 1735337693,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": \"I now can give a great answer \\nFinal Answer: Hi\",\n \"refusal\": null\n },\n \"logprobs\": null,\n @@ -57,12 +142,12 @@ interactions: {\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"system_fingerprint\": - \"fp_0705bf87c0\"\n}\n" + \"fp_0aa8d3e20b\"\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8ece8cfc3b1f4532-ATL + - 8f8caa83deca756b-SEA Connection: - keep-alive Content-Encoding: @@ -70,14 +155,14 @@ interactions: Content-Type: - application/json Date: - - Wed, 04 Dec 2024 20:29:50 GMT + - Fri, 27 Dec 2024 22:14:53 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=QJZZjZ6eqnVamqUkw.Bx0mj7oBi3a_vGEH1VODcUxlg-1733344190-1.0.1.1-xyN0ekA9xIrSwEhRBmTiWJ3Pt72UYLU5owKfkz5yihVmMTfsr_Qz.ssGPJ5cuft066v1xVjb4zOSTdFmesMSKg; - path=/; expires=Wed, 04-Dec-24 20:59:50 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=wJkq_yLkzE3OdxE0aMJz.G0kce969.9JxRmZ0ratl4c-1735337693-1.0.1.1-OKpUoRrSPFGvWv5Hp5ET1PNZ7iZNHPKEAuakpcQUxxPSeisUIIR3qIOZ31MGmYugqB5.wkvidgbxOAagqJvmnw; + path=/; expires=Fri, 27-Dec-24 22:44:53 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=eCIkP8GVPvpkg19eOhCquWFHm.RTQBQy4yHLGGEAH5c-1733344190334-0.0.1.1-604800000; + - _cfuvid=A_ASCLNAVfQoyucWOAIhecWtEpNotYoZr0bAFihgNxs-1735337693273-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked @@ -90,7 +175,7 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '313' + - '404' openai-version: - '2020-10-01' strict-transport-security: @@ -108,7 +193,7 @@ interactions: x-ratelimit-reset-tokens: - 0s x-request-id: - - req_9fd9a8ee688045dcf7ac5f6fdf689372 + - req_6ac84634bff9193743c4b0911c09b4a6 http_version: HTTP/1.1 status_code: 200 - request: @@ -131,20 +216,20 @@ interactions: content-type: - application/json cookie: - - __cf_bm=QJZZjZ6eqnVamqUkw.Bx0mj7oBi3a_vGEH1VODcUxlg-1733344190-1.0.1.1-xyN0ekA9xIrSwEhRBmTiWJ3Pt72UYLU5owKfkz5yihVmMTfsr_Qz.ssGPJ5cuft066v1xVjb4zOSTdFmesMSKg; - _cfuvid=eCIkP8GVPvpkg19eOhCquWFHm.RTQBQy4yHLGGEAH5c-1733344190334-0.0.1.1-604800000 + - _cfuvid=A_ASCLNAVfQoyucWOAIhecWtEpNotYoZr0bAFihgNxs-1735337693273-0.0.1.1-604800000; + 
__cf_bm=wJkq_yLkzE3OdxE0aMJz.G0kce969.9JxRmZ0ratl4c-1735337693-1.0.1.1-OKpUoRrSPFGvWv5Hp5ET1PNZ7iZNHPKEAuakpcQUxxPSeisUIIR3qIOZ31MGmYugqB5.wkvidgbxOAagqJvmnw host: - api.openai.com user-agent: - OpenAI/Python 1.52.1 x-stainless-arch: - - arm64 + - x64 x-stainless-async: - 'false' x-stainless-lang: - python x-stainless-os: - - MacOS + - Linux x-stainless-package-version: - 1.52.1 x-stainless-raw-response: @@ -158,8 +243,8 @@ interactions: method: POST uri: https://api.openai.com/v1/chat/completions response: - content: "{\n \"id\": \"chatcmpl-AaqIIaQlLyoyPmk909PvAIfA2TmJL\",\n \"object\": - \"chat.completion\",\n \"created\": 1733344190,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + content: "{\n \"id\": \"chatcmpl-AjCtZNlWdrrPZhq0MJDqd16sMuQEJ\",\n \"object\": + \"chat.completion\",\n \"created\": 1735337693,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": \"True\",\n \"refusal\": null\n \ },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n @@ -168,12 +253,12 @@ interactions: 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n \ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"system_fingerprint\": - \"fp_0705bf87c0\"\n}\n" + \"fp_0aa8d3e20b\"\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8ece8d060b5e4532-ATL + - 8f8caa87094f756b-SEA Connection: - keep-alive Content-Encoding: @@ -181,7 +266,7 @@ interactions: Content-Type: - application/json Date: - - Wed, 04 Dec 2024 20:29:50 GMT + - Fri, 27 Dec 2024 22:14:53 GMT Server: - cloudflare Transfer-Encoding: @@ -195,7 +280,7 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '375' + - '156' openai-version: - '2020-10-01' strict-transport-security: @@ -213,7 +298,7 @@ interactions: x-ratelimit-reset-tokens: - 0s x-request-id: - - req_be7cb475e0859a82c37ee3f2871ea5ea + - req_ec74bef2a9ef7b2144c03fd7f7bbeab0 http_version: HTTP/1.1 status_code: 200 - request: @@ -242,20 +327,20 @@ interactions: content-type: - application/json cookie: - - __cf_bm=QJZZjZ6eqnVamqUkw.Bx0mj7oBi3a_vGEH1VODcUxlg-1733344190-1.0.1.1-xyN0ekA9xIrSwEhRBmTiWJ3Pt72UYLU5owKfkz5yihVmMTfsr_Qz.ssGPJ5cuft066v1xVjb4zOSTdFmesMSKg; - _cfuvid=eCIkP8GVPvpkg19eOhCquWFHm.RTQBQy4yHLGGEAH5c-1733344190334-0.0.1.1-604800000 + - _cfuvid=A_ASCLNAVfQoyucWOAIhecWtEpNotYoZr0bAFihgNxs-1735337693273-0.0.1.1-604800000; + __cf_bm=wJkq_yLkzE3OdxE0aMJz.G0kce969.9JxRmZ0ratl4c-1735337693-1.0.1.1-OKpUoRrSPFGvWv5Hp5ET1PNZ7iZNHPKEAuakpcQUxxPSeisUIIR3qIOZ31MGmYugqB5.wkvidgbxOAagqJvmnw host: - api.openai.com user-agent: - OpenAI/Python 1.52.1 x-stainless-arch: - - arm64 + - x64 x-stainless-async: - 'false' x-stainless-lang: - python x-stainless-os: - - MacOS + - Linux x-stainless-package-version: - 1.52.1 x-stainless-raw-response: @@ -269,22 +354,23 @@ interactions: method: POST uri: https://api.openai.com/v1/chat/completions response: - content: "{\n \"id\": \"chatcmpl-AaqIJAAxpVfUOdrsgYKHwfRlHv4RS\",\n \"object\": - \"chat.completion\",\n \"created\": 1733344191,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + content: "{\n \"id\": \"chatcmpl-AjCtZGv4f3h7GDdhyOy9G0sB1lRgC\",\n \"object\": + \"chat.completion\",\n \"created\": 1735337693,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": - \"assistant\",\n \"content\": \"Thought: I now can give a great answer - \ \\nFinal Answer: Hello\",\n \"refusal\": null\n },\n 
\"logprobs\": - null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": - 188,\n \"completion_tokens\": 14,\n \"total_tokens\": 202,\n \"prompt_tokens_details\": - {\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": - {\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": + \"assistant\",\n \"content\": \"Thought: I understand the feedback and + will adjust my response accordingly. \\nFinal Answer: Hello\",\n \"refusal\": + null\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n + \ }\n ],\n \"usage\": {\n \"prompt_tokens\": 188,\n \"completion_tokens\": + 18,\n \"total_tokens\": 206,\n \"prompt_tokens_details\": {\n \"cached_tokens\": + 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n + \ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"system_fingerprint\": - \"fp_0705bf87c0\"\n}\n" + \"fp_0aa8d3e20b\"\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8ece8d090fc34532-ATL + - 8f8caa88cac4756b-SEA Connection: - keep-alive Content-Encoding: @@ -292,7 +378,7 @@ interactions: Content-Type: - application/json Date: - - Wed, 04 Dec 2024 20:29:51 GMT + - Fri, 27 Dec 2024 22:14:54 GMT Server: - cloudflare Transfer-Encoding: @@ -306,7 +392,7 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '484' + - '358' openai-version: - '2020-10-01' strict-transport-security: @@ -324,7 +410,7 @@ interactions: x-ratelimit-reset-tokens: - 0s x-request-id: - - req_5bf4a565ad6c2567a1ed204ecac89134 + - req_ae1ab6b206d28ded6fee3c83ed0c2ab7 http_version: HTTP/1.1 status_code: 200 - request: @@ -346,20 +432,20 @@ interactions: content-type: - application/json cookie: - - __cf_bm=QJZZjZ6eqnVamqUkw.Bx0mj7oBi3a_vGEH1VODcUxlg-1733344190-1.0.1.1-xyN0ekA9xIrSwEhRBmTiWJ3Pt72UYLU5owKfkz5yihVmMTfsr_Qz.ssGPJ5cuft066v1xVjb4zOSTdFmesMSKg; - _cfuvid=eCIkP8GVPvpkg19eOhCquWFHm.RTQBQy4yHLGGEAH5c-1733344190334-0.0.1.1-604800000 + - _cfuvid=A_ASCLNAVfQoyucWOAIhecWtEpNotYoZr0bAFihgNxs-1735337693273-0.0.1.1-604800000; + __cf_bm=wJkq_yLkzE3OdxE0aMJz.G0kce969.9JxRmZ0ratl4c-1735337693-1.0.1.1-OKpUoRrSPFGvWv5Hp5ET1PNZ7iZNHPKEAuakpcQUxxPSeisUIIR3qIOZ31MGmYugqB5.wkvidgbxOAagqJvmnw host: - api.openai.com user-agent: - OpenAI/Python 1.52.1 x-stainless-arch: - - arm64 + - x64 x-stainless-async: - 'false' x-stainless-lang: - python x-stainless-os: - - MacOS + - Linux x-stainless-package-version: - 1.52.1 x-stainless-raw-response: @@ -373,8 +459,8 @@ interactions: method: POST uri: https://api.openai.com/v1/chat/completions response: - content: "{\n \"id\": \"chatcmpl-AaqIJqyG8vl9mxj2qDPZgaxyNLLIq\",\n \"object\": - \"chat.completion\",\n \"created\": 1733344191,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + content: "{\n \"id\": \"chatcmpl-AjCtaiHL4TY8Dssk0j2miqmjrzquy\",\n \"object\": + \"chat.completion\",\n \"created\": 1735337694,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": \"False\",\n \"refusal\": null\n \ },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n @@ -383,12 +469,12 @@ interactions: 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n \ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"system_fingerprint\": - \"fp_0705bf87c0\"\n}\n" + \"fp_0aa8d3e20b\"\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - 
- 8ece8d0cfdeb4532-ATL + - 8f8caa8bdd26756b-SEA Connection: - keep-alive Content-Encoding: @@ -396,7 +482,7 @@ interactions: Content-Type: - application/json Date: - - Wed, 04 Dec 2024 20:29:51 GMT + - Fri, 27 Dec 2024 22:14:54 GMT Server: - cloudflare Transfer-Encoding: @@ -410,7 +496,7 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '341' + - '184' openai-version: - '2020-10-01' strict-transport-security: @@ -428,7 +514,7 @@ interactions: x-ratelimit-reset-tokens: - 0s x-request-id: - - req_5554bade8ceda00cf364b76a51b708ff + - req_652891f79c1104a7a8436275d78a69f1 http_version: HTTP/1.1 status_code: 200 version: 1 diff --git a/tests/crew_test.py b/tests/crew_test.py index 4f22c0d6ed..74a659738f 100644 --- a/tests/crew_test.py +++ b/tests/crew_test.py @@ -1228,6 +1228,7 @@ def test_kickoff_for_each_empty_input(): assert results == [] +@pytest.mark.vcr(filter_headers=["authorization"]) def test_kickoff_for_each_invalid_input(): """Tests if kickoff_for_each raises TypeError for invalid input types.""" diff --git a/tests/test_flow_persistence.py b/tests/test_flow_persistence.py new file mode 100644 index 0000000000..74971f30da --- /dev/null +++ b/tests/test_flow_persistence.py @@ -0,0 +1,195 @@ +"""Test flow state persistence functionality.""" + +import os +from typing import Dict, Optional + +import pytest +from pydantic import BaseModel + +from crewai.flow.flow import Flow, FlowState, start +from crewai.flow.persistence import FlowPersistence, persist +from crewai.flow.persistence.sqlite import SQLiteFlowPersistence + + +class TestState(FlowState): + """Test state model with required id field.""" + counter: int = 0 + message: str = "" + + +def test_persist_decorator_saves_state(tmp_path): + """Test that @persist decorator saves state in SQLite.""" + db_path = os.path.join(tmp_path, "test_flows.db") + persistence = SQLiteFlowPersistence(db_path) + + class TestFlow(Flow[Dict[str, str]]): + initial_state = dict() # Use dict instance as initial state + + @start() + @persist(persistence) + def init_step(self): + self.state["message"] = "Hello, World!" + self.state["id"] = "test-uuid" # Ensure we have an ID for persistence + + # Run flow and verify state is saved + flow = TestFlow(persistence=persistence) + flow.kickoff() + + # Load state from DB and verify + saved_state = persistence.load_state(flow.state["id"]) + assert saved_state is not None + assert saved_state["message"] == "Hello, World!" 
+ + +def test_structured_state_persistence(tmp_path): + """Test persistence with Pydantic model state.""" + db_path = os.path.join(tmp_path, "test_flows.db") + persistence = SQLiteFlowPersistence(db_path) + + class StructuredFlow(Flow[TestState]): + initial_state = TestState + + @start() + @persist(persistence) + def count_up(self): + self.state.counter += 1 + self.state.message = f"Count is {self.state.counter}" + + # Run flow and verify state changes are saved + flow = StructuredFlow(persistence=persistence) + flow.kickoff() + + # Load and verify state + saved_state = persistence.load_state(flow.state.id) + assert saved_state is not None + assert saved_state["counter"] == 1 + assert saved_state["message"] == "Count is 1" + + +def test_flow_state_restoration(tmp_path): + """Test restoring flow state from persistence with various restoration methods.""" + db_path = os.path.join(tmp_path, "test_flows.db") + persistence = SQLiteFlowPersistence(db_path) + + # First flow execution to create initial state + class RestorableFlow(Flow[TestState]): + initial_state = TestState + + @start() + @persist(persistence) + def set_message(self): + self.state.message = "Original message" + self.state.counter = 42 + + # Create and persist initial state + flow1 = RestorableFlow(persistence=persistence) + flow1.kickoff() + original_uuid = flow1.state.id + + # Test case 1: Restore using restore_uuid with field override + flow2 = RestorableFlow( + persistence=persistence, + restore_uuid=original_uuid, + counter=43, # Override counter + ) + + # Verify state restoration and selective field override + assert flow2.state.id == original_uuid + assert flow2.state.message == "Original message" # Preserved + assert flow2.state.counter == 43 # Overridden + + # Test case 2: Restore using kwargs['id'] + flow3 = RestorableFlow( + persistence=persistence, + id=original_uuid, + message="Updated message", # Override message + ) + + # Verify state restoration and selective field override + assert flow3.state.id == original_uuid + assert flow3.state.counter == 42 # Preserved + assert flow3.state.message == "Updated message" # Overridden + + # Test case 3: Verify error on conflicting IDs + with pytest.raises(ValueError) as exc_info: + RestorableFlow( + persistence=persistence, + restore_uuid=original_uuid, + id="different-id", # Conflict with restore_uuid + ) + assert "Conflicting IDs provided" in str(exc_info.value) + + # Test case 4: Verify error on non-existent restore_uuid + with pytest.raises(ValueError) as exc_info: + RestorableFlow( + persistence=persistence, + restore_uuid="non-existent-uuid", + ) + assert "No state found" in str(exc_info.value) + + # Test case 5: Allow new state creation with kwargs['id'] + new_uuid = "new-flow-id" + flow4 = RestorableFlow( + persistence=persistence, + id=new_uuid, + message="New message", + counter=100, + ) + + # Verify new state creation with provided ID + assert flow4.state.id == new_uuid + assert flow4.state.message == "New message" + assert flow4.state.counter == 100 + + +def test_multiple_method_persistence(tmp_path): + """Test state persistence across multiple method executions.""" + db_path = os.path.join(tmp_path, "test_flows.db") + persistence = SQLiteFlowPersistence(db_path) + + class MultiStepFlow(Flow[TestState]): + initial_state = TestState + + @start() + @persist(persistence) + def step_1(self): + self.state.counter = 1 + self.state.message = "Step 1" + + @start() + @persist(persistence) + def step_2(self): + self.state.counter = 2 + self.state.message = "Step 2" + + flow 
= MultiStepFlow(persistence=persistence) + flow.kickoff() + + # Load final state + final_state = persistence.load_state(flow.state.id) + assert final_state is not None + assert final_state["counter"] == 2 + assert final_state["message"] == "Step 2" + + +def test_persistence_error_handling(tmp_path): + """Test error handling in persistence operations.""" + db_path = os.path.join(tmp_path, "test_flows.db") + persistence = SQLiteFlowPersistence(db_path) + + class InvalidFlow(Flow[TestState]): + # Missing id field in initial state + class InvalidState(BaseModel): + value: str = "" + + initial_state = InvalidState + + @start() + @persist(persistence) + def will_fail(self): + self.state.value = "test" + + with pytest.raises(ValueError) as exc_info: + flow = InvalidFlow(persistence=persistence) + + assert "must have an 'id' field" in str(exc_info.value) diff --git a/uv.lock b/uv.lock index 56ed758dfd..11f2e6691e 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,7 @@ version = 1 requires-python = ">=3.10, <3.13" resolution-markers = [ + "python_full_version < '3.11' and platform_system == 'Darwin' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'darwin'", "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version < '3.11' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')", @@ -36,7 +37,7 @@ resolution-markers = [ "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'", "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')", "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'", - "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')" ] [[package]] @@ -345,7 +346,7 @@ name = "build" version = "1.2.2.post1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "os_name == 'nt'" }, + { name = "colorama", marker = "(os_name == 'nt' and platform_machine != 'aarch64' and sys_platform == 'linux') or (os_name == 'nt' and sys_platform != 'darwin' and sys_platform != 'linux')" }, { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, { name = "packaging" }, { name = "pyproject-hooks" }, @@ -580,7 +581,7 @@ name = "click" version = "8.1.7" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } wheels = [ @@ -2587,7 +2588,7 @@ version = "1.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "ghp-import" }, { name = "jinja2" }, { name = "markdown" }, @@ -2768,7 +2769,7 @@ version = "2.10.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pygments" }, - { name = "pywin32", marker = "platform_system == 'Windows'" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3a/93/80ac75c20ce54c785648b4ed363c88f148bf22637e10c9863db4fbe73e74/mpire-2.10.2.tar.gz", hash = "sha256:f66a321e93fadff34585a4bfa05e95bd946cf714b442f51c529038eb45773d97", size = 271270 } @@ -3015,7 +3016,7 @@ name = "nvidia-cudnn-cu12" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" } ] wheels = [ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, @@ -3044,7 +3045,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, { name = "nvidia-cusparse-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, - { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" } ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 }, @@ -3055,7 +3056,7 @@ name = "nvidia-cusparse-cu12" version = "12.1.0.106" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and 
platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" } ] wheels = [ { url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 }, @@ -3605,7 +3606,7 @@ name = "portalocker" version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pywin32", marker = "platform_system == 'Windows'" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } wheels = [ @@ -5193,19 +5194,19 @@ dependencies = [ { name = "fsspec" }, { name = "jinja2" }, { name = "networkx" }, - { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "sympy" }, - { name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "triton", marker = "platform_machine == 
'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions" }, ] wheels = [ @@ -5252,7 +5253,7 @@ name = "tqdm" version = "4.66.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 } wheels = [ @@ -5295,7 +5296,7 @@ version = "0.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, - { name = "cffi", marker = "implementation_name != 'pypy' and os_name == 'nt'" }, + { name = "cffi", marker = "(implementation_name != 'pypy' and os_name == 'nt' and platform_machine != 'aarch64' and sys_platform == 'linux') or (implementation_name != 'pypy' and os_name == 'nt' and sys_platform != 'darwin' and sys_platform != 'linux')" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, { name = "outcome" }, @@ -5326,7 +5327,7 @@ name = "triton" version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "filelock", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "filelock", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" } ] wheels = [ { url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 }, From 42311d9c7ab6d033784ce2035a03c3f695fc0173 Mon Sep 17 00:00:00 2001 From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 11:18:54 -0300 Subject: [PATCH 2/6] Fix SQLite log handling issue causing ValueError: Logs cannot be None in tests (#1899) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix SQLite log handling issue causing ValueError: Logs cannot be None in tests - Add proper error handling in SQLite storage operations - Set up isolated test environment with temporary storage directory - Ensure consistent error messages across all database operations Co-Authored-By: Joe Moura * fix: Sort imports in conftest.py Co-Authored-By: Joe Moura * fix: Convert TokenProcess counters to instance variables to fix callback tracking Co-Authored-By: Joe Moura * refactor: Replace print statements with logging and improve error handling - Add proper logging setup in kickoff_task_outputs_storage.py - Replace self._printer.print() with logger calls - Use appropriate log levels (error/warning) - Add directory validation in test environment setup - Maintain consistent error messages with DatabaseError format Co-Authored-By: Joe Moura * fix: Comprehensive improvements to database and token handling - Fix SQLite database path handling in storage classes - Add proper directory creation and error handling - Improve token tracking with robust type checking - Convert TokenProcess counters to instance variables - Add standardized database error handling 
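The standardized database error handling referenced above pairs message templates on a DatabaseError class with a DatabaseOperationError exception that keeps the original sqlite3 error attached. What follows is a minimal, self-contained sketch of that pattern, not the patch itself: the save_output helper and the outputs table are illustrative stand-ins, while the real implementation lives in src/crewai/utilities/errors.py and the storage classes changed below.

import logging
import sqlite3
from typing import Optional

logger = logging.getLogger(__name__)


class DatabaseOperationError(Exception):
    """Raised when a SQLite operation fails; keeps the original error attached."""

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        super().__init__(message)
        self.original_error = original_error


class DatabaseError:
    """Standardized error message templates for database operations."""

    SAVE_ERROR: str = "Error saving task outputs: {}"

    @classmethod
    def format_error(cls, template: str, error: Exception) -> str:
        return template.format(str(error))


def save_output(db_path: str, task_id: str, output: str) -> None:
    """Illustrative write that converts sqlite3.Error into DatabaseOperationError."""
    try:
        with sqlite3.connect(db_path) as conn:
            # Table name and columns are hypothetical, for the sketch only.
            conn.execute("CREATE TABLE IF NOT EXISTS outputs (task_id TEXT, output TEXT)")
            conn.execute(
                "INSERT INTO outputs (task_id, output) VALUES (?, ?)",
                (task_id, output),
            )
            conn.commit()
    except sqlite3.Error as e:
        # Format a standardized message, log it, and re-raise with context.
        error_msg = DatabaseError.format_error(DatabaseError.SAVE_ERROR, e)
        logger.error(error_msg)
        raise DatabaseOperationError(error_msg, e)

Calling save_output(":memory:", "task-1", "{}") exercises the happy path; an unwritable path raises DatabaseOperationError carrying the formatted message and the original sqlite3 error.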
- Set up isolated test environment with temporary storage Resolves test failures in PR #1899 Co-Authored-By: Joe Moura --------- Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com> Co-authored-by: Joe Moura Co-authored-by: João Moura --- .../utilities/base_token_process.py | 11 +- src/crewai/llm.py | 13 ++ .../storage/kickoff_task_outputs_storage.py | 118 ++++++++++++------ .../memory/storage/ltm_sqlite_storage.py | 8 +- src/crewai/utilities/errors.py | 39 ++++++ .../utilities/token_counter_callback.py | 20 +-- tests/conftest.py | 33 +++++ 7 files changed, 193 insertions(+), 49 deletions(-) create mode 100644 src/crewai/utilities/errors.py diff --git a/src/crewai/agents/agent_builder/utilities/base_token_process.py b/src/crewai/agents/agent_builder/utilities/base_token_process.py index 320d34caaf..322fade0e4 100644 --- a/src/crewai/agents/agent_builder/utilities/base_token_process.py +++ b/src/crewai/agents/agent_builder/utilities/base_token_process.py @@ -2,11 +2,12 @@ class TokenProcess: - total_tokens: int = 0 - prompt_tokens: int = 0 - cached_prompt_tokens: int = 0 - completion_tokens: int = 0 - successful_requests: int = 0 + def __init__(self): + self.total_tokens: int = 0 + self.prompt_tokens: int = 0 + self.cached_prompt_tokens: int = 0 + self.completion_tokens: int = 0 + self.successful_requests: int = 0 def sum_prompt_tokens(self, tokens: int): self.prompt_tokens = self.prompt_tokens + tokens diff --git a/src/crewai/llm.py b/src/crewai/llm.py index 5bc58dfe06..790d13ead6 100644 --- a/src/crewai/llm.py +++ b/src/crewai/llm.py @@ -222,6 +222,19 @@ def call( ].message text_response = response_message.content or "" tool_calls = getattr(response_message, "tool_calls", []) + + # Ensure callbacks get the full response object with usage info + if callbacks and len(callbacks) > 0: + for callback in callbacks: + if hasattr(callback, "log_success_event"): + usage_info = getattr(response, "usage", None) + if usage_info: + callback.log_success_event( + kwargs=params, + response_obj={"usage": usage_info}, + start_time=0, + end_time=0, + ) # --- 2) If no tool calls, return the text response if not tool_calls or not available_functions: diff --git a/src/crewai/memory/storage/kickoff_task_outputs_storage.py b/src/crewai/memory/storage/kickoff_task_outputs_storage.py index 26905191cb..ef99e7b86e 100644 --- a/src/crewai/memory/storage/kickoff_task_outputs_storage.py +++ b/src/crewai/memory/storage/kickoff_task_outputs_storage.py @@ -1,12 +1,17 @@ import json +import logging import sqlite3 +from pathlib import Path from typing import Any, Dict, List, Optional from crewai.task import Task from crewai.utilities import Printer from crewai.utilities.crew_json_encoder import CrewJSONEncoder +from crewai.utilities.errors import DatabaseError, DatabaseOperationError from crewai.utilities.paths import db_storage_path +logger = logging.getLogger(__name__) + class KickoffTaskOutputsSQLiteStorage: """ @@ -14,15 +19,24 @@ class KickoffTaskOutputsSQLiteStorage: """ def __init__( - self, db_path: str = f"{db_storage_path()}/latest_kickoff_task_outputs.db" + self, db_path: Optional[str] = None ) -> None: + if db_path is None: + # Get the parent directory of the default db path and create our db file there + db_path = str(Path(db_storage_path()).parent / "latest_kickoff_task_outputs.db") self.db_path = db_path self._printer: Printer = Printer() self._initialize_db() - def _initialize_db(self): - """ - Initializes the SQLite database and creates LTM table + def _initialize_db(self) 
-> None: + """Initialize the SQLite database and create the latest_kickoff_task_outputs table. + + This method sets up the database schema for storing task outputs. It creates + a table with columns for task_id, expected_output, output (as JSON), + task_index, inputs (as JSON), was_replayed flag, and timestamp. + + Raises: + DatabaseOperationError: If database initialization fails due to SQLite errors. """ try: with sqlite3.connect(self.db_path) as conn: @@ -43,10 +57,9 @@ def _initialize_db(self): conn.commit() except sqlite3.Error as e: - self._printer.print( - content=f"SAVING KICKOFF TASK OUTPUTS ERROR: An error occurred during database initialization: {e}", - color="red", - ) + error_msg = DatabaseError.format_error(DatabaseError.INIT_ERROR, e) + logger.error(error_msg) + raise DatabaseOperationError(error_msg, e) def add( self, @@ -55,9 +68,22 @@ def add( task_index: int, was_replayed: bool = False, inputs: Dict[str, Any] = {}, - ): + ) -> None: + """Add a new task output record to the database. + + Args: + task: The Task object containing task details. + output: Dictionary containing the task's output data. + task_index: Integer index of the task in the sequence. + was_replayed: Boolean indicating if this was a replay execution. + inputs: Dictionary of input parameters used for the task. + + Raises: + DatabaseOperationError: If saving the task output fails due to SQLite errors. + """ try: with sqlite3.connect(self.db_path) as conn: + conn.execute("BEGIN TRANSACTION") cursor = conn.cursor() cursor.execute( """ @@ -76,21 +102,31 @@ def add( ) conn.commit() except sqlite3.Error as e: - self._printer.print( - content=f"SAVING KICKOFF TASK OUTPUTS ERROR: An error occurred during database initialization: {e}", - color="red", - ) + error_msg = DatabaseError.format_error(DatabaseError.SAVE_ERROR, e) + logger.error(error_msg) + raise DatabaseOperationError(error_msg, e) def update( self, task_index: int, - **kwargs, - ): - """ - Updates an existing row in the latest_kickoff_task_outputs table based on task_index. + **kwargs: Any, + ) -> None: + """Update an existing task output record in the database. + + Updates fields of a task output record identified by task_index. The fields + to update are provided as keyword arguments. + + Args: + task_index: Integer index of the task to update. + **kwargs: Arbitrary keyword arguments representing fields to update. + Values that are dictionaries will be JSON encoded. + + Raises: + DatabaseOperationError: If updating the task output fails due to SQLite errors. """ try: with sqlite3.connect(self.db_path) as conn: + conn.execute("BEGIN TRANSACTION") cursor = conn.cursor() fields = [] @@ -110,14 +146,23 @@ def update( conn.commit() if cursor.rowcount == 0: - self._printer.print( - f"No row found with task_index {task_index}. No update performed.", - color="red", - ) + logger.warning(f"No row found with task_index {task_index}. No update performed.") except sqlite3.Error as e: - self._printer.print(f"UPDATE KICKOFF TASK OUTPUTS ERROR: {e}", color="red") + error_msg = DatabaseError.format_error(DatabaseError.UPDATE_ERROR, e) + logger.error(error_msg) + raise DatabaseOperationError(error_msg, e) + + def load(self) -> List[Dict[str, Any]]: + """Load all task output records from the database. - def load(self) -> Optional[List[Dict[str, Any]]]: + Returns: + List of dictionaries containing task output records, ordered by task_index. + Each dictionary contains: task_id, expected_output, output, task_index, + inputs, was_replayed, and timestamp. 
+ + Raises: + DatabaseOperationError: If loading task outputs fails due to SQLite errors. + """ try: with sqlite3.connect(self.db_path) as conn: cursor = conn.cursor() @@ -144,23 +189,26 @@ def load(self) -> Optional[List[Dict[str, Any]]]: return results except sqlite3.Error as e: - self._printer.print( - content=f"LOADING KICKOFF TASK OUTPUTS ERROR: An error occurred while querying kickoff task outputs: {e}", - color="red", - ) - return None + error_msg = DatabaseError.format_error(DatabaseError.LOAD_ERROR, e) + logger.error(error_msg) + raise DatabaseOperationError(error_msg, e) - def delete_all(self): - """ - Deletes all rows from the latest_kickoff_task_outputs table. + def delete_all(self) -> None: + """Delete all task output records from the database. + + This method removes all records from the latest_kickoff_task_outputs table. + Use with caution as this operation cannot be undone. + + Raises: + DatabaseOperationError: If deleting task outputs fails due to SQLite errors. """ try: with sqlite3.connect(self.db_path) as conn: + conn.execute("BEGIN TRANSACTION") cursor = conn.cursor() cursor.execute("DELETE FROM latest_kickoff_task_outputs") conn.commit() except sqlite3.Error as e: - self._printer.print( - content=f"ERROR: Failed to delete all kickoff task outputs: {e}", - color="red", - ) + error_msg = DatabaseError.format_error(DatabaseError.DELETE_ERROR, e) + logger.error(error_msg) + raise DatabaseOperationError(error_msg, e) diff --git a/src/crewai/memory/storage/ltm_sqlite_storage.py b/src/crewai/memory/storage/ltm_sqlite_storage.py index 93d993ee67..3d12087229 100644 --- a/src/crewai/memory/storage/ltm_sqlite_storage.py +++ b/src/crewai/memory/storage/ltm_sqlite_storage.py @@ -1,5 +1,6 @@ import json import sqlite3 +from pathlib import Path from typing import Any, Dict, List, Optional, Union from crewai.utilities import Printer @@ -12,10 +13,15 @@ class LTMSQLiteStorage: """ def __init__( - self, db_path: str = f"{db_storage_path()}/long_term_memory_storage.db" + self, db_path: Optional[str] = None ) -> None: + if db_path is None: + # Get the parent directory of the default db path and create our db file there + db_path = str(Path(db_storage_path()).parent / "long_term_memory_storage.db") self.db_path = db_path self._printer: Printer = Printer() + # Ensure parent directory exists + Path(self.db_path).parent.mkdir(parents=True, exist_ok=True) self._initialize_db() def _initialize_db(self): diff --git a/src/crewai/utilities/errors.py b/src/crewai/utilities/errors.py new file mode 100644 index 0000000000..f673c0600a --- /dev/null +++ b/src/crewai/utilities/errors.py @@ -0,0 +1,39 @@ +"""Error message definitions for CrewAI database operations.""" +from typing import Optional + + +class DatabaseOperationError(Exception): + """Base exception class for database operation errors.""" + + def __init__(self, message: str, original_error: Optional[Exception] = None): + """Initialize the database operation error. 
+ + Args: + message: The error message to display + original_error: The original exception that caused this error, if any + """ + super().__init__(message) + self.original_error = original_error + + +class DatabaseError: + """Standardized error message templates for database operations.""" + + INIT_ERROR: str = "Database initialization error: {}" + SAVE_ERROR: str = "Error saving task outputs: {}" + UPDATE_ERROR: str = "Error updating task outputs: {}" + LOAD_ERROR: str = "Error loading task outputs: {}" + DELETE_ERROR: str = "Error deleting task outputs: {}" + + @classmethod + def format_error(cls, template: str, error: Exception) -> str: + """Format an error message with the given template and error. + + Args: + template: The error message template to use + error: The exception to format into the template + + Returns: + The formatted error message + """ + return template.format(str(error)) diff --git a/src/crewai/utilities/token_counter_callback.py b/src/crewai/utilities/token_counter_callback.py index ee26d41ab4..e612fcae4b 100644 --- a/src/crewai/utilities/token_counter_callback.py +++ b/src/crewai/utilities/token_counter_callback.py @@ -23,11 +23,15 @@ def log_success_event( with warnings.catch_warnings(): warnings.simplefilter("ignore", UserWarning) - usage: Usage = response_obj["usage"] - self.token_cost_process.sum_successful_requests(1) - self.token_cost_process.sum_prompt_tokens(usage.prompt_tokens) - self.token_cost_process.sum_completion_tokens(usage.completion_tokens) - if usage.prompt_tokens_details: - self.token_cost_process.sum_cached_prompt_tokens( - usage.prompt_tokens_details.cached_tokens - ) + if isinstance(response_obj, dict) and "usage" in response_obj: + usage: Usage = response_obj["usage"] + if usage: + self.token_cost_process.sum_successful_requests(1) + if hasattr(usage, "prompt_tokens"): + self.token_cost_process.sum_prompt_tokens(usage.prompt_tokens) + if hasattr(usage, "completion_tokens"): + self.token_cost_process.sum_completion_tokens(usage.completion_tokens) + if hasattr(usage, "prompt_tokens_details") and usage.prompt_tokens_details: + self.token_cost_process.sum_cached_prompt_tokens( + usage.prompt_tokens_details.cached_tokens + ) diff --git a/tests/conftest.py b/tests/conftest.py index 4fdb3b1441..518c69a816 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,37 @@ # conftest.py +import os +import tempfile +from pathlib import Path + +import pytest from dotenv import load_dotenv load_result = load_dotenv(override=True) + +@pytest.fixture(autouse=True) +def setup_test_environment(): + """Set up test environment with a temporary directory for SQLite storage.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create the directory with proper permissions + storage_dir = Path(temp_dir) / "crewai_test_storage" + storage_dir.mkdir(parents=True, exist_ok=True) + + # Validate that the directory was created successfully + if not storage_dir.exists() or not storage_dir.is_dir(): + raise RuntimeError(f"Failed to create test storage directory: {storage_dir}") + + # Verify directory permissions + try: + # Try to create a test file to verify write permissions + test_file = storage_dir / ".permissions_test" + test_file.touch() + test_file.unlink() + except (OSError, IOError) as e: + raise RuntimeError(f"Test storage directory {storage_dir} is not writable: {e}") + + # Set environment variable to point to the test storage directory + os.environ["CREWAI_STORAGE_DIR"] = str(storage_dir) + + yield + + # Cleanup is handled automatically when tempfile 
context exits From b5779dca125b449d05dd489a46e3314e74e0d415 Mon Sep 17 00:00:00 2001 From: "Brandon Hancock (bhancock_ai)" <109994880+bhancockio@users.noreply.github.com> Date: Thu, 16 Jan 2025 11:28:58 -0500 Subject: [PATCH 3/6] Fix nested pydantic model issue (#1905) * Fix nested pydantic model issue * fix failing tests * add in vcr * cleanup * drop prints * Fix vcr issues * added new recordings * trying to fix vcr * add in fix from lorenze. --- .../utilities/base_output_converter.py | 2 +- src/crewai/utilities/converter.py | 23 +- src/crewai/utilities/internal_instructor.py | 9 +- .../utilities/pydantic_schema_parser.py | 107 +- .../test_convert_with_instructions.yaml | 114 + .../test_converter_with_llama3_1_model.yaml | 2048 +++++++++++++++++ .../test_converter_with_llama3_2_model.yaml | 869 +++++++ .../test_converter_with_nested_model.yaml | 116 + tests/utilities/test_converter.py | 307 ++- .../utilities/test_pydantic_schema_parser.py | 94 + 10 files changed, 3626 insertions(+), 63 deletions(-) create mode 100644 tests/utilities/cassettes/test_convert_with_instructions.yaml create mode 100644 tests/utilities/cassettes/test_converter_with_llama3_1_model.yaml create mode 100644 tests/utilities/cassettes/test_converter_with_llama3_2_model.yaml create mode 100644 tests/utilities/cassettes/test_converter_with_nested_model.yaml create mode 100644 tests/utilities/test_pydantic_schema_parser.py diff --git a/src/crewai/agents/agent_builder/utilities/base_output_converter.py b/src/crewai/agents/agent_builder/utilities/base_output_converter.py index 448803c158..454edc5f34 100644 --- a/src/crewai/agents/agent_builder/utilities/base_output_converter.py +++ b/src/crewai/agents/agent_builder/utilities/base_output_converter.py @@ -25,7 +25,7 @@ class OutputConverter(BaseModel, ABC): llm: Any = Field(description="The language model to be used to convert the text.") model: Any = Field(description="The model to be used to convert the text.") instructions: str = Field(description="Conversion instructions to the LLM.") - max_attempts: Optional[int] = Field( + max_attempts: int = Field( description="Max number of attempts to try to get the output formatted.", default=3, ) diff --git a/src/crewai/utilities/converter.py b/src/crewai/utilities/converter.py index ba958ddc66..e9f8c6b8e6 100644 --- a/src/crewai/utilities/converter.py +++ b/src/crewai/utilities/converter.py @@ -26,17 +26,24 @@ def to_pydantic(self, current_attempt=1): if self.llm.supports_function_calling(): return self._create_instructor().to_pydantic() else: - return self.llm.call( + response = self.llm.call( [ {"role": "system", "content": self.instructions}, {"role": "user", "content": self.text}, ] ) + return self.model.model_validate_json(response) + except ValidationError as e: + if current_attempt < self.max_attempts: + return self.to_pydantic(current_attempt + 1) + raise ConverterError( + f"Failed to convert text into a Pydantic model due to the following validation error: {e}" + ) except Exception as e: if current_attempt < self.max_attempts: return self.to_pydantic(current_attempt + 1) - return ConverterError( - f"Failed to convert text into a pydantic model due to the following error: {e}" + raise ConverterError( + f"Failed to convert text into a Pydantic model due to the following error: {e}" ) def to_json(self, current_attempt=1): @@ -66,7 +73,6 @@ def _create_instructor(self): llm=self.llm, model=self.model, content=self.text, - instructions=self.instructions, ) return inst @@ -187,10 +193,15 @@ def convert_with_instructions( def 
get_conversion_instructions(model: Type[BaseModel], llm: Any) -> str: - instructions = "I'm gonna convert this raw text into valid JSON." + instructions = "Please convert the following text into valid JSON." if llm.supports_function_calling(): model_schema = PydanticSchemaParser(model=model).get_schema() - instructions = f"{instructions}\n\nThe json should have the following structure, with the following keys:\n{model_schema}" + instructions += ( + f"\n\nThe JSON should follow this schema:\n```json\n{model_schema}\n```" + ) + else: + model_description = generate_model_description(model) + instructions += f"\n\nThe JSON should follow this format:\n{model_description}" return instructions diff --git a/src/crewai/utilities/internal_instructor.py b/src/crewai/utilities/internal_instructor.py index 65a05a61f4..e9401c7789 100644 --- a/src/crewai/utilities/internal_instructor.py +++ b/src/crewai/utilities/internal_instructor.py @@ -11,12 +11,10 @@ def __init__( model: Type, agent: Optional[Any] = None, llm: Optional[str] = None, - instructions: Optional[str] = None, ): self.content = content self.agent = agent self.llm = llm - self.instructions = instructions self.model = model self._client = None self.set_instructor() @@ -31,10 +29,7 @@ def set_instructor(self): import instructor from litellm import completion - self._client = instructor.from_litellm( - completion, - mode=instructor.Mode.TOOLS, - ) + self._client = instructor.from_litellm(completion) def to_json(self): model = self.to_pydantic() @@ -42,8 +37,6 @@ def to_json(self): def to_pydantic(self): messages = [{"role": "user", "content": self.content}] - if self.instructions: - messages.append({"role": "system", "content": self.instructions}) model = self._client.chat.completions.create( model=self.llm.model, response_model=self.model, messages=messages ) diff --git a/src/crewai/utilities/pydantic_schema_parser.py b/src/crewai/utilities/pydantic_schema_parser.py index f4c8c720f2..2827d70aad 100644 --- a/src/crewai/utilities/pydantic_schema_parser.py +++ b/src/crewai/utilities/pydantic_schema_parser.py @@ -1,4 +1,4 @@ -from typing import Type, Union, get_args, get_origin +from typing import Dict, List, Type, Union, get_args, get_origin from pydantic import BaseModel @@ -10,40 +10,83 @@ def get_schema(self) -> str: """ Public method to get the schema of a Pydantic model. - :param model: The Pydantic model class to generate schema for. :return: String representation of the model schema. 
""" - return self._get_model_schema(self.model) - - def _get_model_schema(self, model, depth=0) -> str: - indent = " " * depth - lines = [f"{indent}{{"] - for field_name, field in model.model_fields.items(): - field_type_str = self._get_field_type(field, depth + 1) - lines.append(f"{indent} {field_name}: {field_type_str},") - lines[-1] = lines[-1].rstrip(",") # Remove trailing comma from last item - lines.append(f"{indent}}}") - return "\n".join(lines) - - def _get_field_type(self, field, depth) -> str: + return "{\n" + self._get_model_schema(self.model) + "\n}" + + def _get_model_schema(self, model: Type[BaseModel], depth: int = 0) -> str: + indent = " " * 4 * depth + lines = [ + f"{indent} {field_name}: {self._get_field_type(field, depth + 1)}" + for field_name, field in model.model_fields.items() + ] + return ",\n".join(lines) + + def _get_field_type(self, field, depth: int) -> str: field_type = field.annotation - if get_origin(field_type) is list: + origin = get_origin(field_type) + + if origin in {list, List}: list_item_type = get_args(field_type)[0] - if isinstance(list_item_type, type) and issubclass( - list_item_type, BaseModel - ): - nested_schema = self._get_model_schema(list_item_type, depth + 1) - return f"List[\n{nested_schema}\n{' ' * 4 * depth}]" - else: - return f"List[{list_item_type.__name__}]" - elif get_origin(field_type) is Union: - union_args = get_args(field_type) - if type(None) in union_args: - non_none_type = next(arg for arg in union_args if arg is not type(None)) - return f"Optional[{self._get_field_type(field.__class__(annotation=non_none_type), depth)}]" + return self._format_list_type(list_item_type, depth) + + if origin in {dict, Dict}: + key_type, value_type = get_args(field_type) + return f"Dict[{key_type.__name__}, {value_type.__name__}]" + + if origin is Union: + return self._format_union_type(field_type, depth) + + if isinstance(field_type, type) and issubclass(field_type, BaseModel): + nested_schema = self._get_model_schema(field_type, depth) + nested_indent = " " * 4 * depth + return f"{field_type.__name__}\n{nested_indent}{{\n{nested_schema}\n{nested_indent}}}" + + return field_type.__name__ + + def _format_list_type(self, list_item_type, depth: int) -> str: + if isinstance(list_item_type, type) and issubclass(list_item_type, BaseModel): + nested_schema = self._get_model_schema(list_item_type, depth + 1) + nested_indent = " " * 4 * (depth) + return f"List[\n{nested_indent}{{\n{nested_schema}\n{nested_indent}}}\n{nested_indent}]" + return f"List[{list_item_type.__name__}]" + + def _format_union_type(self, field_type, depth: int) -> str: + args = get_args(field_type) + if type(None) in args: + # It's an Optional type + non_none_args = [arg for arg in args if arg is not type(None)] + if len(non_none_args) == 1: + inner_type = self._get_field_type_for_annotation( + non_none_args[0], depth + ) + return f"Optional[{inner_type}]" else: - return f"Union[{', '.join(arg.__name__ for arg in union_args)}]" - elif isinstance(field_type, type) and issubclass(field_type, BaseModel): - return self._get_model_schema(field_type, depth) + # Union with None and multiple other types + inner_types = ", ".join( + self._get_field_type_for_annotation(arg, depth) + for arg in non_none_args + ) + return f"Optional[Union[{inner_types}]]" else: - return getattr(field_type, "__name__", str(field_type)) + # General Union type + inner_types = ", ".join( + self._get_field_type_for_annotation(arg, depth) for arg in args + ) + return f"Union[{inner_types}]" + + def 
_get_field_type_for_annotation(self, annotation, depth: int) -> str: + origin = get_origin(annotation) + if origin in {list, List}: + list_item_type = get_args(annotation)[0] + return self._format_list_type(list_item_type, depth) + if origin in {dict, Dict}: + key_type, value_type = get_args(annotation) + return f"Dict[{key_type.__name__}, {value_type.__name__}]" + if origin is Union: + return self._format_union_type(annotation, depth) + if isinstance(annotation, type) and issubclass(annotation, BaseModel): + nested_schema = self._get_model_schema(annotation, depth) + nested_indent = " " * 4 * depth + return f"{annotation.__name__}\n{nested_indent}{{\n{nested_schema}\n{nested_indent}}}" + return annotation.__name__ diff --git a/tests/utilities/cassettes/test_convert_with_instructions.yaml b/tests/utilities/cassettes/test_convert_with_instructions.yaml new file mode 100644 index 0000000000..7e9b652471 --- /dev/null +++ b/tests/utilities/cassettes/test_convert_with_instructions.yaml @@ -0,0 +1,114 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": "Name: Alice, Age: 30"}], "model": + "gpt-4o-mini", "tool_choice": {"type": "function", "function": {"name": "SimpleModel"}}, + "tools": [{"type": "function", "function": {"name": "SimpleModel", "description": + "Correctly extracted `SimpleModel` with all the required parameters with correct + types", "parameters": {"properties": {"name": {"title": "Name", "type": "string"}, + "age": {"title": "Age", "type": "integer"}}, "required": ["age", "name"], "type": + "object"}}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '507' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.59.6 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.59.6 + x-stainless-raw-response: + - 'true' + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.7 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + content: "{\n \"id\": \"chatcmpl-Aq4a4xDv8G0i4fbTtPJEI2B8UNBup\",\n \"object\": + \"chat.completion\",\n \"created\": 1736974028,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n + \ \"id\": \"call_uO5nec8hTk1fpYINM8TUafhe\",\n \"type\": + \"function\",\n \"function\": {\n \"name\": \"SimpleModel\",\n + \ \"arguments\": \"{\\\"name\\\":\\\"Alice\\\",\\\"age\\\":30}\"\n + \ }\n }\n ],\n \"refusal\": null\n },\n + \ \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n ],\n + \ \"usage\": {\n \"prompt_tokens\": 79,\n \"completion_tokens\": 10,\n + \ \"total_tokens\": 89,\n \"prompt_tokens_details\": {\n \"cached_tokens\": + 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n + \ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": + 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\": + \"default\",\n \"system_fingerprint\": \"fp_72ed7ab54c\"\n}\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9028b81aeb1cb05f-ATL + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Wed, 15 Jan 2025 20:47:08 GMT + Server: + - cloudflare + Set-Cookie: + - 
__cf_bm=PzayZLF04c14veGc.0ocVg3VHBbpzKRW8Hqox8L9U7c-1736974028-1.0.1.1-mZpK8.SH9l7K2z8Tvt6z.dURiVPjFqEz7zYEITfRwdr5z0razsSebZGN9IRPmI5XC_w5rbZW2Kg6hh5cenXinQ; + path=/; expires=Wed, 15-Jan-25 21:17:08 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=ciwC3n2Srn20xx4JhEUeN6Ap0tNBaE44S95nIilboQ0-1736974028496-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '439' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149999978' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_a468000458b9d2848b7497b2e3d485a3 + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/tests/utilities/cassettes/test_converter_with_llama3_1_model.yaml b/tests/utilities/cassettes/test_converter_with_llama3_1_model.yaml new file mode 100644 index 0000000000..ca597b3edb --- /dev/null +++ b/tests/utilities/cassettes/test_converter_with_llama3_1_model.yaml @@ -0,0 +1,2048 @@ +interactions: +- request: + body: '{"model": "llama3.1", "prompt": "### User:\nName: Alice Llama, Age: 30\n\n### + System:\nProduce JSON OUTPUT ONLY! Adhere to this format {\"name\": \"function_name\", + \"arguments\":{\"argument_name\": \"argument_value\"}} The following functions + are available to you:\n{''type'': ''function'', ''function'': {''name'': ''SimpleModel'', + ''description'': ''Correctly extracted `SimpleModel` with all the required parameters + with correct types'', ''parameters'': {''properties'': {''name'': {''title'': + ''Name'', ''type'': ''string''}, ''age'': {''title'': ''Age'', ''type'': ''integer''}}, + ''required'': [''age'', ''name''], ''type'': ''object''}}}\n\n\n", "options": + {}, "stream": false, "format": "json"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '654' + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/generate + response: + content: '{"model":"llama3.1","created_at":"2025-01-15T20:47:17.068012Z","response":"{\"name\": + \"SimpleModel\", \"arguments\": {\"age\": \"30\", \"name\": \"Alice 
Llama\"}}","done":true,"done_reason":"stop","context":[128006,882,128007,271,14711,2724,512,678,25,30505,445,81101,11,13381,25,220,966,271,14711,744,512,1360,13677,4823,32090,27785,0,2467,6881,311,420,3645,5324,609,794,330,1723,1292,498,330,16774,23118,14819,1292,794,330,14819,3220,32075,578,2768,5865,527,2561,311,499,512,13922,1337,1232,364,1723,518,364,1723,1232,5473,609,1232,364,16778,1747,518,364,4789,1232,364,34192,398,28532,1595,16778,1747,63,449,682,279,2631,5137,449,4495,4595,518,364,14105,1232,5473,13495,1232,5473,609,1232,5473,2150,1232,364,678,518,364,1337,1232,364,928,25762,364,425,1232,5473,2150,1232,364,17166,518,364,1337,1232,364,11924,8439,2186,364,6413,1232,2570,425,518,364,609,4181,364,1337,1232,364,1735,23742,3818,128009,128006,78191,128007,271,5018,609,794,330,16778,1747,498,330,16774,794,5324,425,794,330,966,498,330,609,794,330,62786,445,81101,32075],"total_duration":4753211958,"load_duration":1084951250,"prompt_eval_count":152,"prompt_eval_duration":2906000000,"eval_count":25,"eval_duration":761000000}' + headers: + Content-Length: + - '1193' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:17 GMT + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.1"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '20' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama 3.1 Version + Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution and modification of the\\nLlama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.1\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\",\"modelfile\":\"# + Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based + on this, replace FROM with:\\n# FROM llama3.1:latest\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-87048bcd55216712ef14c11c2c303728463207b165bf18440b9b84b07ec00f87\\nTEMPLATE + \\\"\\\"\\\"{{ if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. 
Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end }}\\\"\\\"\\\"\\nPARAMETER + stop \\u003c|start_header_id|\\u003e\\nPARAMETER stop \\u003c|end_header_id|\\u003e\\nPARAMETER + stop \\u003c|eot_id|\\u003e\\nLICENSE \\\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama + 3.1 Version Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the + terms and conditions for use, reproduction, distribution and modification of + the\\nLlama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means + the specifications, manuals and documentation accompanying Llama 3.1\\ndistributed + by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D or + \u201Cyou\u201D means you, or your employer or any other person or entity (if + you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\\\"\\n\",\"parameters\":\"stop + \ \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"{{ + if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end 
}}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"8.0B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Meta-Llama-3.1\",\"general.file_type\":2,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.license\":\"llama3.1\",\"general.parameter_count\":8030261248,\"general.quantization_version\":2,\"general.size_label\":\"8B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":8,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.block_count\":32,\"llama.context_length\":131072,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":14336,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-08-01T11:38:16.96106256-04:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:17 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.1"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '20' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama 3.1 Version + Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution and modification of the\\nLlama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.1\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. 
(if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. 
IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). 
The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. + Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. 
Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. + Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\",\"modelfile\":\"# + Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based + on this, replace FROM with:\\n# FROM llama3.1:latest\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-87048bcd55216712ef14c11c2c303728463207b165bf18440b9b84b07ec00f87\\nTEMPLATE + \\\"\\\"\\\"{{ if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. 
Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end }}\\\"\\\"\\\"\\nPARAMETER + stop \\u003c|start_header_id|\\u003e\\nPARAMETER stop \\u003c|end_header_id|\\u003e\\nPARAMETER + stop \\u003c|eot_id|\\u003e\\nLICENSE \\\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama + 3.1 Version Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the + terms and conditions for use, reproduction, distribution and modification of + the\\nLlama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means + the specifications, manuals and documentation accompanying Llama 3.1\\ndistributed + by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D or + \u201Cyou\u201D means you, or your employer or any other person or entity (if + you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\\\"\\n\",\"parameters\":\"stop + \ \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"{{ + if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end 
}}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"8.0B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Meta-Llama-3.1\",\"general.file_type\":2,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.license\":\"llama3.1\",\"general.parameter_count\":8030261248,\"general.quantization_version\":2,\"general.size_label\":\"8B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":8,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.block_count\":32,\"llama.context_length\":131072,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":14336,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-08-01T11:38:16.96106256-04:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:17 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.1"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '20' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama 3.1 Version + Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution and modification of the\\nLlama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.1\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. 
(if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. 
IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). 
The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. + Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. 
Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. + Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\",\"modelfile\":\"# + Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based + on this, replace FROM with:\\n# FROM llama3.1:latest\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-87048bcd55216712ef14c11c2c303728463207b165bf18440b9b84b07ec00f87\\nTEMPLATE + \\\"\\\"\\\"{{ if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. 
Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end }}\\\"\\\"\\\"\\nPARAMETER + stop \\u003c|start_header_id|\\u003e\\nPARAMETER stop \\u003c|end_header_id|\\u003e\\nPARAMETER + stop \\u003c|eot_id|\\u003e\\nLICENSE \\\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama + 3.1 Version Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the + terms and conditions for use, reproduction, distribution and modification of + the\\nLlama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means + the specifications, manuals and documentation accompanying Llama 3.1\\ndistributed + by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D or + \u201Cyou\u201D means you, or your employer or any other person or entity (if + you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\\\"\\n\",\"parameters\":\"stop + \ \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"{{ + if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end 
}}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"8.0B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Meta-Llama-3.1\",\"general.file_type\":2,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.license\":\"llama3.1\",\"general.parameter_count\":8030261248,\"general.quantization_version\":2,\"general.size_label\":\"8B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":8,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.block_count\":32,\"llama.context_length\":131072,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":14336,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-08-01T11:38:16.96106256-04:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:17 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"model": "llama3.1", "prompt": "### User:\nName: Alice Llama, Age: 30\n\n### + Assistant:\nTool Calls: [\n {\n \"id\": \"call_5487de90-385d-48f4-843c-04b9dc635b23\",\n \"type\": + \"function\",\n \"function\": {\n \"name\": \"SimpleModel\",\n \"arguments\": + {\n \"age\": \"30\",\n \"name\": \"Alice Llama\"\n }\n }\n }\n]\n\n### + User:\nValidation Error found:\n1 validation error for SimpleModel\nage\n Input + should be a valid integer [type=int_type, input_value=''30'', input_type=str]\n For + further information visit https://errors.pydantic.dev/2.10/v/int_type\nRecall + the function correctly, fix the errors\n\n### System:\nProduce JSON OUTPUT ONLY! 
+ Adhere to this format {\"name\": \"function_name\", \"arguments\":{\"argument_name\": + \"argument_value\"}} The following functions are available to you:\n{''type'': + ''function'', ''function'': {''name'': ''SimpleModel'', ''description'': ''Correctly + extracted `SimpleModel` with all the required parameters with correct types'', + ''parameters'': {''properties'': {''name'': {''title'': ''Name'', ''type'': + ''string''}, ''age'': {''title'': ''Age'', ''type'': ''integer''}}, ''required'': + [''age'', ''name''], ''type'': ''object''}}}\n\n\n", "options": {}, "stream": + false, "format": "json"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '1235' + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/generate + response: + content: '{"model":"llama3.1","created_at":"2025-01-15T20:47:19.399083Z","response":"{\"name\": + \"SimpleModel\", \"arguments\":{\"name\": \"Alice Llama\", \"age\": 30}}","done":true,"done_reason":"stop","context":[128006,882,128007,271,14711,2724,512,678,25,30505,445,81101,11,13381,25,220,966,271,14711,22103,512,7896,41227,25,2330,220,341,262,330,307,794,330,6797,62,22287,22,451,1954,12,18695,67,12,2166,69,19,12,23996,66,12,2371,65,24,7783,22276,65,1419,761,262,330,1337,794,330,1723,761,262,330,1723,794,341,415,330,609,794,330,16778,1747,761,415,330,16774,794,341,286,330,425,794,330,966,761,286,330,609,794,330,62786,445,81101,702,415,457,262,457,220,457,2595,14711,2724,512,14118,4703,1766,512,16,10741,1493,369,9170,1747,198,425,198,220,5688,1288,387,264,2764,7698,510,1337,16972,1857,11,1988,3220,1151,966,518,1988,1857,16311,933,262,1789,4726,2038,4034,3788,1129,7805,7345,67,8322,22247,14,17,13,605,5574,32214,1857,198,3905,543,279,734,12722,11,5155,279,6103,271,14711,744,512,1360,13677,4823,32090,27785,0,2467,6881,311,420,3645,5324,609,794,330,1723,1292,498,330,16774,23118,14819,1292,794,330,14819,3220,32075,578,2768,5865,527,2561,311,499,512,13922,1337,1232,364,1723,518,364,1723,1232,5473,609,1232,364,16778,1747,518,364,4789,1232,364,34192,398,28532,1595,16778,1747,63,449,682,279,2631,5137,449,4495,4595,518,364,14105,1232,5473,13495,1232,5473,609,1232,5473,2150,1232,364,678,518,364,1337,1232,364,928,25762,364,425,1232,5473,2150,1232,364,17166,518,364,1337,1232,364,11924,8439,2186,364,6413,1232,2570,425,518,364,609,4181,364,1337,1232,364,1735,23742,3818,128009,128006,78191,128007,271,5018,609,794,330,16778,1747,498,330,16774,23118,609,794,330,62786,445,81101,498,330,425,794,220,966,3500],"total_duration":1822667750,"load_duration":14204166,"prompt_eval_count":306,"prompt_eval_duration":1057000000,"eval_count":24,"eval_duration":749000000}' + headers: + Content-Length: + - '1859' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:19 GMT + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.1"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '20' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama 3.1 Version + Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution and modification of the\\nLlama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D 
means the specifications, manuals + and documentation accompanying Llama 3.1\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. 
Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. 
The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. + Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. 
Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. + Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\",\"modelfile\":\"# + Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based + on this, replace FROM with:\\n# FROM llama3.1:latest\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-87048bcd55216712ef14c11c2c303728463207b165bf18440b9b84b07ec00f87\\nTEMPLATE + \\\"\\\"\\\"{{ if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. 
Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end }}\\\"\\\"\\\"\\nPARAMETER + stop \\u003c|start_header_id|\\u003e\\nPARAMETER stop \\u003c|end_header_id|\\u003e\\nPARAMETER + stop \\u003c|eot_id|\\u003e\\nLICENSE \\\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama + 3.1 Version Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the + terms and conditions for use, reproduction, distribution and modification of + the\\nLlama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means + the specifications, manuals and documentation accompanying Llama 3.1\\ndistributed + by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D or + \u201Cyou\u201D means you, or your employer or any other person or entity (if + you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\\\"\\n\",\"parameters\":\"stop + \ \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"{{ + if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end 
}}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"8.0B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Meta-Llama-3.1\",\"general.file_type\":2,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.license\":\"llama3.1\",\"general.parameter_count\":8030261248,\"general.quantization_version\":2,\"general.size_label\":\"8B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":8,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.block_count\":32,\"llama.context_length\":131072,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":14336,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-08-01T11:38:16.96106256-04:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:19 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.1"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '20' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama 3.1 Version + Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution and modification of the\\nLlama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.1\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. 
(if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. 
IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). 
The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. + Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. 
Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. + Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\",\"modelfile\":\"# + Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based + on this, replace FROM with:\\n# FROM llama3.1:latest\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-87048bcd55216712ef14c11c2c303728463207b165bf18440b9b84b07ec00f87\\nTEMPLATE + \\\"\\\"\\\"{{ if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. 
Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end }}\\\"\\\"\\\"\\nPARAMETER + stop \\u003c|start_header_id|\\u003e\\nPARAMETER stop \\u003c|end_header_id|\\u003e\\nPARAMETER + stop \\u003c|eot_id|\\u003e\\nLICENSE \\\"LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\\nLlama + 3.1 Version Release Date: July 23, 2024\\n\\n\u201CAgreement\u201D means the + terms and conditions for use, reproduction, distribution and modification of + the\\nLlama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means + the specifications, manuals and documentation accompanying Llama 3.1\\ndistributed + by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D or + \u201Cyou\u201D means you, or your employer or any other person or entity (if + you are entering into\\nthis Agreement on such person or entity\u2019s behalf), + of the age required under applicable laws, rules or\\nregulations to provide + legal consent and that has legal authority to bind your employer or such other\\nperson + or entity if you are entering in this Agreement on their behalf.\\n\\n\u201CLlama + 3.1\u201D means the foundational large language models and software and algorithms, + including\\nmachine-learning model code, trained model weights, inference-enabling + code, training-enabling code,\\nfine-tuning enabling code and other elements + of the foregoing distributed by Meta at\\nhttps://llama.meta.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.1 and Documentation + (and any\\nportion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, if you are an entity, your\\nprincipal place of business is in the EEA or + Switzerland) and Meta Platforms, Inc. (if you are located\\noutside of the EEA + or Switzerland).\\n\\nBy clicking \u201CI Accept\u201D below or by using or + distributing any portion or element of the Llama Materials,\\nyou agree to be + bound by this Agreement.\\n\\n1. License Rights and Redistribution.\\n\\n a. + Grant of Rights. 
You are granted a non-exclusive, worldwide, non-transferable + and royalty-free\\nlimited license under Meta\u2019s intellectual property or + other rights owned by Meta embodied in the Llama\\nMaterials to use, reproduce, + distribute, copy, create derivative works of, and make modifications to the\\nLlama + Materials.\\n\\n b. Redistribution and Use.\\n\\n i. If you distribute + or make available the Llama Materials (or any derivative works\\nthereof), or + a product or service (including another AI model) that contains any of them, + you shall (A)\\nprovide a copy of this Agreement with any such Llama Materials; + and (B) prominently display \u201CBuilt with\\nLlama\u201D on a related website, + user interface, blogpost, about page, or product documentation. If you use\\nthe + Llama Materials or any outputs or results of the Llama Materials to create, + train, fine tune, or\\notherwise improve an AI model, which is distributed or + made available, you shall also include \u201CLlama\u201D at\\nthe beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, or + any derivative works thereof, from a Licensee as part \\nof an integrated end + user product, then Section 2 of this Agreement will not apply to you.\\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the following\\nattribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \u201CLlama 3.1 is\\nlicensed under the Llama 3.1 + Community License, Copyright \xA9 Meta Platforms, Inc. All Rights\\nReserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws and + regulations\\n(including trade compliance laws and regulations) and adhere to + the Acceptable Use Policy for the Llama\\nMaterials (available at https://llama.meta.com/llama3_1/use-policy), + which is hereby incorporated by\\nreference into this Agreement.\\n\\n2. Additional + Commercial Terms. If, on the Llama 3.1 version release date, the monthly active + users\\nof the products or services made available by or for Licensee, or Licensee\u2019s + affiliates, is greater than 700\\nmillion monthly active users in the preceding + calendar month, you must request a license from Meta,\\nwhich Meta may grant + to you in its sole discretion, and you are not authorized to exercise any of + the\\nrights under this Agreement unless or until Meta otherwise expressly grants + you such rights.\\n\\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE + LAW, THE LLAMA MATERIALS AND ANY\\nOUTPUT AND RESULTS THEREFROM ARE PROVIDED + ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF\\nANY KIND, AND META DISCLAIMS + ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,\\nINCLUDING, WITHOUT LIMITATION, + ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,\\nMERCHANTABILITY, OR FITNESS FOR + A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR\\nDETERMINING THE APPROPRIATENESS + OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND\\nASSUME ANY RISKS ASSOCIATED + WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND\\nRESULTS.\\n\\n4. Limitation + of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY + OF\\nLIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR + OTHERWISE, ARISING\\nOUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, + SPECIAL, CONSEQUENTIAL,\\nINCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF + META OR ITS AFFILIATES HAVE BEEN ADVISED\\nOF THE POSSIBILITY OF ANY OF THE + FOREGOING.\\n\\n5. 
Intellectual Property.\\n\\n a. No trademark licenses are + granted under this Agreement, and in connection with the Llama\\nMaterials, + neither Meta nor Licensee may use any name or mark owned by or associated with + the other\\nor any of its affiliates, except as required for reasonable and + customary use in describing and\\nredistributing the Llama Materials or as set + forth in this Section 5(a). Meta hereby grants you a license to\\nuse \u201CLlama\u201D + (the \u201CMark\u201D) solely as required to comply with the last sentence of + Section 1.b.i. You will\\ncomply with Meta\u2019s brand guidelines (currently + accessible at\\nhttps://about.meta.com/brand/resources/meta/company-brand/ ). + All goodwill arising out of your use\\nof the Mark will inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and derivatives + made by or for Meta, with\\nrespect to any derivative works and modifications + of the Llama Materials that are made by you, as\\nbetween you and Meta, you + are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any entity + (including a\\ncross-claim or counterclaim in a lawsuit) alleging that the Llama + Materials or Llama 3.1 outputs or\\nresults, or any portion of any of the foregoing, + constitutes infringement of intellectual property or other\\nrights owned or + licensable by you, then any licenses granted to you under this Agreement shall\\nterminate + as of the date such litigation or claim is filed or instituted. You will indemnify + and hold\\nharmless Meta from and against any claim by any third party arising + out of or related to your use or\\ndistribution of the Llama Materials.\\n\\n6. + Term and Termination. The term of this Agreement will commence upon your acceptance + of this\\nAgreement or access to the Llama Materials and will continue in full + force and effect until terminated in\\naccordance with the terms and conditions + herein. Meta may terminate this Agreement if you are in\\nbreach of any term + or condition of this Agreement. Upon termination of this Agreement, you shall + delete\\nand cease use of the Llama Materials. Sections 3, 4 and 7 shall survive + the termination of this\\nAgreement.\\n\\n7. Governing Law and Jurisdiction. + This Agreement will be governed and construed under the laws of\\nthe State + of California without regard to choice of law principles, and the UN Convention + on Contracts\\nfor the International Sale of Goods does not apply to this Agreement. + The courts of California shall have\\nexclusive jurisdiction of any dispute + arising out of this Agreement.\\n\\n# Llama 3.1 Acceptable Use Policy\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.1. If you\\naccess or use Llama 3.1, you agree to this Acceptable Use + Policy (\u201CPolicy\u201D). The most recent copy of\\nthis policy can be found + at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\\n\\n## + Prohibited Uses\\n\\nWe want everyone to use Llama 3.1 safely and responsibly. + You agree you will not use, or allow\\nothers to use, Llama 3.1 to:\\n\\n1. + Violate the law or others\u2019 rights, including to:\\n 1. Engage in, promote, + generate, contribute to, encourage, plan, incite, or further illegal or unlawful + activity or content, such as:\\n 1. Violence or terrorism\\n 2. 
+ Exploitation or harm to children, including the solicitation, creation, acquisition, + or dissemination of child exploitative content or failure to report Child Sexual + Abuse Material\\n 3. Human trafficking, exploitation, and sexual violence\\n + \ 4. The illegal distribution of information or materials to minors, including + obscene materials, or failure to employ legally required age-gating in connection + with such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 3. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 4. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 5. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 6. Collect, process, disclose, generate, + or infer health, demographic, or other sensitive personal or private information + about individuals without rights and consents required by applicable laws\\n + \ 7. Engage in or facilitate any action or generate any content that infringes, + misappropriates, or otherwise violates any third-party rights, including the + outputs or results of any products or services using the Llama Materials\\n + \ 8. Create, generate, or facilitate the creation of malicious code, malware, + computer viruses or do anything else that could disable, overburden, interfere + with or impair the proper working, integrity, operation or appearance of a website + or computer system\\n\\n2. Engage in, promote, incite, facilitate, or assist + in the planning or development of activities that present a risk of death or + bodily harm to individuals, including use of Llama 3.1 related to the following:\\n + \ 1. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State\\n 2. + Guns and illegal weapons (including weapon development)\\n 3. Illegal drugs + and regulated/controlled substances\\n 4. Operation of critical infrastructure, + transportation technologies, or heavy machinery\\n 5. Self-harm or harm to + others, including suicide, cutting, and eating disorders\\n 6. Any content + intended to incite or promote violence, abuse, or any infliction of bodily harm + to an individual\\n\\n3. Intentionally deceive or mislead others, including + use of Llama 3.1 related to the following:\\n 1. Generating, promoting, or + furthering fraud or the creation or promotion of disinformation\\n 2. Generating, + promoting, or furthering defamatory content, including the creation of defamatory + statements, images, or other content\\n 3. Generating, promoting, or further + distributing spam\\n 4. Impersonating another individual without consent, + authorization, or legal right\\n 5. Representing that the use of Llama 3.1 + or outputs are human-generated\\n 6. Generating or facilitating false online + engagement, including fake reviews and other means of fake online engagement\\n\\n4. 
+ Fail to appropriately disclose to end users any known dangers of your AI system\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation\\nof this Policy through one of the following + means:\\n\\n* Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\\n* + Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback\\n* + Reporting bugs and security concerns: facebook.com/whitehat/info\\n* Reporting + violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com\\n\\\"\\n\",\"parameters\":\"stop + \ \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"{{ + if .Messages }}\\n{{- if or .System .Tools }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n{{- + if .System }}\\n\\n{{ .System }}\\n{{- end }}\\n{{- if .Tools }}\\n\\nYou are + a helpful assistant with tool calling capabilities. When you receive a tool + call response, use the output to format an answer to the orginal use question.\\n{{- + end }}\\u003c|eot_id|\\u003e\\n{{- end }}\\n{{- range $i, $_ := .Messages }}\\n{{- + $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- if eq .Role \\\"user\\\" + }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- if + and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ $.Tools + }}\\n{{- end }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n\\n{{- range .ToolCalls }}{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\n{{- else }}\\n{{- if .System }}\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\n{{ + .System }}\\u003c|eot_id|\\u003e{{ end }}{{ if .Prompt }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n\\n{{ + .Prompt }}\\u003c|eot_id|\\u003e{{ end }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}{{ .Response }}{{ if .Response }}\\u003c|eot_id|\\u003e{{ end 
}}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"8.0B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Meta-Llama-3.1\",\"general.file_type\":2,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.license\":\"llama3.1\",\"general.parameter_count\":8030261248,\"general.quantization_version\":2,\"general.size_label\":\"8B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":8,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.block_count\":32,\"llama.context_length\":131072,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":14336,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-08-01T11:38:16.96106256-04:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:19 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/tests/utilities/cassettes/test_converter_with_llama3_2_model.yaml b/tests/utilities/cassettes/test_converter_with_llama3_2_model.yaml new file mode 100644 index 0000000000..fdcb661a8b --- /dev/null +++ b/tests/utilities/cassettes/test_converter_with_llama3_2_model.yaml @@ -0,0 +1,869 @@ +interactions: +- request: + body: '{"model": "llama3.2:3b", "prompt": "### User:\nName: Alice Llama, Age: + 30\n\n### System:\nProduce JSON OUTPUT ONLY! 
Adhere to this format {\"name\": + \"function_name\", \"arguments\":{\"argument_name\": \"argument_value\"}} The + following functions are available to you:\n{''type'': ''function'', ''function'': + {''name'': ''SimpleModel'', ''description'': ''Correctly extracted `SimpleModel` + with all the required parameters with correct types'', ''parameters'': {''properties'': + {''name'': {''title'': ''Name'', ''type'': ''string''}, ''age'': {''title'': + ''Age'', ''type'': ''integer''}}, ''required'': [''age'', ''name''], ''type'': + ''object''}}}\n\n\n", "options": {}, "stream": false, "format": "json"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '657' + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/generate + response: + content: '{"model":"llama3.2:3b","created_at":"2025-01-15T20:47:11.926411Z","response":"{\"name\": + \"SimpleModel\", \"arguments\":{\"name\": \"Alice Llama\", \"age\": 30}}","done":true,"done_reason":"stop","context":[128006,9125,128007,271,38766,1303,33025,2696,25,6790,220,2366,18,271,128009,128006,882,128007,271,14711,2724,512,678,25,30505,445,81101,11,13381,25,220,966,271,14711,744,512,1360,13677,4823,32090,27785,0,2467,6881,311,420,3645,5324,609,794,330,1723,1292,498,330,16774,23118,14819,1292,794,330,14819,3220,32075,578,2768,5865,527,2561,311,499,512,13922,1337,1232,364,1723,518,364,1723,1232,5473,609,1232,364,16778,1747,518,364,4789,1232,364,34192,398,28532,1595,16778,1747,63,449,682,279,2631,5137,449,4495,4595,518,364,14105,1232,5473,13495,1232,5473,609,1232,5473,2150,1232,364,678,518,364,1337,1232,364,928,25762,364,425,1232,5473,2150,1232,364,17166,518,364,1337,1232,364,11924,8439,2186,364,6413,1232,2570,425,518,364,609,4181,364,1337,1232,364,1735,23742,3818,128009,128006,78191,128007,271,5018,609,794,330,16778,1747,498,330,16774,23118,609,794,330,62786,445,81101,498,330,425,794,220,966,3500],"total_duration":3374470708,"load_duration":1075750500,"prompt_eval_count":167,"prompt_eval_duration":1871000000,"eval_count":24,"eval_duration":426000000}' + headers: + Content-Length: + - '1263' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:12 GMT + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.2:3b"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '23' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.2 COMMUNITY LICENSE AGREEMENT\\nLlama 3.2 Version + Release Date: September 25, 2024\\n\\n\u201CAgreement\u201D means the terms + and conditions for use, reproduction, distribution \\nand modification of the + Llama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, + manuals and documentation accompanying Llama 3.2\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are \\nentering into this Agreement on such person or entity\u2019s + behalf), of the age required under\\napplicable laws, rules or regulations to + provide legal consent and that has legal authority\\nto bind your employer or + such other person or entity if you are entering in this Agreement\\non their + behalf.\\n\\n\u201CLlama 
3.2\u201D means the foundational large language models + and software and algorithms, including\\nmachine-learning model code, trained + model weights, inference-enabling code, training-enabling code,\\nfine-tuning + enabling code and other elements of the foregoing distributed by Meta at \\nhttps://www.llama.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.2 and Documentation + (and \\nany portion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, \\nif you are an entity, your principal place of business is in the EEA + or Switzerland) \\nand Meta Platforms, Inc. (if you are located outside of the + EEA or Switzerland). \\n\\n\\nBy clicking \u201CI Accept\u201D below or by using + or distributing any portion or element of the Llama Materials,\\nyou agree to + be bound by this Agreement.\\n\\n\\n1. License Rights and Redistribution.\\n\\n + \ a. Grant of Rights. You are granted a non-exclusive, worldwide, \\nnon-transferable + and royalty-free limited license under Meta\u2019s intellectual property or + other rights \\nowned by Meta embodied in the Llama Materials to use, reproduce, + distribute, copy, create derivative works \\nof, and make modifications to the + Llama Materials. \\n\\n b. Redistribution and Use. \\n\\n i. If + you distribute or make available the Llama Materials (or any derivative works + thereof), \\nor a product or service (including another AI model) that contains + any of them, you shall (A) provide\\na copy of this Agreement with any such + Llama Materials; and (B) prominently display \u201CBuilt with Llama\u201D\\non + a related website, user interface, blogpost, about page, or product documentation. + If you use the\\nLlama Materials or any outputs or results of the Llama Materials + to create, train, fine tune, or\\notherwise improve an AI model, which is distributed + or made available, you shall also include \u201CLlama\u201D\\nat the beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, + or any derivative works thereof, from a Licensee as part\\nof an integrated + end user product, then Section 2 of this Agreement will not apply to you. \\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the \\nfollowing attribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \\n\u201CLlama 3.2 is licensed under the Llama 3.2 + Community License, Copyright \xA9 Meta Platforms,\\nInc. All Rights Reserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws + and regulations\\n(including trade compliance laws and regulations) and adhere + to the Acceptable Use Policy for\\nthe Llama Materials (available at https://www.llama.com/llama3_2/use-policy), + which is hereby \\nincorporated by reference into this Agreement.\\n \\n2. + Additional Commercial Terms. If, on the Llama 3.2 version release date, the + monthly active users\\nof the products or services made available by or for + Licensee, or Licensee\u2019s affiliates, \\nis greater than 700 million monthly + active users in the preceding calendar month, you must request \\na license + from Meta, which Meta may grant to you in its sole discretion, and you are not + authorized to\\nexercise any of the rights under this Agreement unless or until + Meta otherwise expressly grants you such rights.\\n\\n3. Disclaimer of Warranty. 
+ UNLESS REQUIRED BY APPLICABLE LAW, THE LLAMA MATERIALS AND ANY OUTPUT AND \\nRESULTS + THEREFROM ARE PROVIDED ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF + ANY KIND, AND META DISCLAIMS\\nALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND + IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\\nOF TITLE, NON-INFRINGEMENT, + MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE\\nFOR + DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE LLAMA MATERIALS + AND ASSUME ANY RISKS ASSOCIATED\\nWITH YOUR USE OF THE LLAMA MATERIALS AND ANY + OUTPUT AND RESULTS.\\n\\n4. Limitation of Liability. IN NO EVENT WILL META OR + ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, \\nWHETHER IN CONTRACT, + TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, + \\nFOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, + EXEMPLARY OR PUNITIVE DAMAGES, EVEN \\nIF META OR ITS AFFILIATES HAVE BEEN ADVISED + OF THE POSSIBILITY OF ANY OF THE FOREGOING.\\n\\n5. Intellectual Property.\\n\\n + \ a. No trademark licenses are granted under this Agreement, and in connection + with the Llama Materials, \\nneither Meta nor Licensee may use any name or mark + owned by or associated with the other or any of its affiliates, \\nexcept as + required for reasonable and customary use in describing and redistributing the + Llama Materials or as \\nset forth in this Section 5(a). Meta hereby grants + you a license to use \u201CLlama\u201D (the \u201CMark\u201D) solely as required + \\nto comply with the last sentence of Section 1.b.i. You will comply with Meta\u2019s + brand guidelines (currently accessible \\nat https://about.meta.com/brand/resources/meta/company-brand/). + All goodwill arising out of your use of the Mark \\nwill inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and + derivatives made by or for Meta, with respect to any\\n derivative works + and modifications of the Llama Materials that are made by you, as between you + and Meta,\\n you are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any + entity (including a cross-claim or\\n counterclaim in a lawsuit) alleging + that the Llama Materials or Llama 3.2 outputs or results, or any portion\\n + \ of any of the foregoing, constitutes infringement of intellectual property + or other rights owned or licensable\\n by you, then any licenses granted + to you under this Agreement shall terminate as of the date such litigation or\\n + \ claim is filed or instituted. You will indemnify and hold harmless Meta + from and against any claim by any third\\n party arising out of or related + to your use or distribution of the Llama Materials.\\n\\n6. Term and Termination. + The term of this Agreement will commence upon your acceptance of this Agreement + or access\\nto the Llama Materials and will continue in full force and effect + until terminated in accordance with the terms\\nand conditions herein. Meta + may terminate this Agreement if you are in breach of any term or condition of + this\\nAgreement. Upon termination of this Agreement, you shall delete and cease + use of the Llama Materials. Sections 3,\\n4 and 7 shall survive the termination + of this Agreement. \\n\\n7. Governing Law and Jurisdiction. 
This Agreement will + be governed and construed under the laws of the State of \\nCalifornia without + regard to choice of law principles, and the UN Convention on Contracts for the + International\\nSale of Goods does not apply to this Agreement. The courts of + California shall have exclusive jurisdiction of\\nany dispute arising out of + this Agreement.\\n**Llama 3.2** **Acceptable Use Policy**\\n\\nMeta is committed + to promoting safe and fair use of its tools and features, including Llama 3.2. + If you access or use Llama 3.2, you agree to this Acceptable Use Policy (\u201C**Policy**\u201D). + The most recent copy of this policy can be found at [https://www.llama.com/llama3_2/use-policy](https://www.llama.com/llama3_2/use-policy).\\n\\n**Prohibited + Uses**\\n\\nWe want everyone to use Llama 3.2 safely and responsibly. You agree + you will not use, or allow others to use, Llama 3.2 to:\\n\\n\\n\\n1. Violate + the law or others\u2019 rights, including to:\\n 1. Engage in, promote, generate, + contribute to, encourage, plan, incite, or further illegal or unlawful activity + or content, such as:\\n 1. Violence or terrorism\\n 2. Exploitation + or harm to children, including the solicitation, creation, acquisition, or dissemination + of child exploitative content or failure to report Child Sexual Abuse Material\\n + \ 3. Human trafficking, exploitation, and sexual violence\\n 4. + The illegal distribution of information or materials to minors, including obscene + materials, or failure to employ legally required age-gating in connection with + such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 1. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 2. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 3. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 4. Collect, process, disclose, generate, + or infer private or sensitive information about individuals, including information + about individuals\u2019 identity, health, or demographic information, unless + you have obtained the right to do so in accordance with applicable law\\n 5. + Engage in or facilitate any action or generate any content that infringes, misappropriates, + or otherwise violates any third-party rights, including the outputs or results + of any products or services using the Llama Materials\\n 6. Create, generate, + or facilitate the creation of malicious code, malware, computer viruses or do + anything else that could disable, overburden, interfere with or impair the proper + working, integrity, operation or appearance of a website or computer system\\n + \ 7. Engage in any action, or facilitate any action, to intentionally circumvent + or remove usage restrictions or other safety measures, or to enable functionality + disabled by Meta\\n2. Engage in, promote, incite, facilitate, or assist in the + planning or development of activities that present a risk of death or bodily + harm to individuals, including use of Llama 3.2 related to the following:\\n + \ 8. 
Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State or to + the U.S. Biological Weapons Anti-Terrorism Act of 1989 or the Chemical Weapons + Convention Implementation Act of 1997\\n 9. Guns and illegal weapons (including + weapon development)\\n 10. Illegal drugs and regulated/controlled substances\\n + \ 11. Operation of critical infrastructure, transportation technologies, or + heavy machinery\\n 12. Self-harm or harm to others, including suicide, cutting, + and eating disorders\\n 13. Any content intended to incite or promote violence, + abuse, or any infliction of bodily harm to an individual\\n3. Intentionally + deceive or mislead others, including use of Llama 3.2 related to the following:\\n + \ 14. Generating, promoting, or furthering fraud or the creation or promotion + of disinformation\\n 15. Generating, promoting, or furthering defamatory + content, including the creation of defamatory statements, images, or other content\\n + \ 16. Generating, promoting, or further distributing spam\\n 17. Impersonating + another individual without consent, authorization, or legal right\\n 18. + Representing that the use of Llama 3.2 or outputs are human-generated\\n 19. + Generating or facilitating false online engagement, including fake reviews and + other means of fake online engagement\\n4. Fail to appropriately disclose to + end users any known dangers of your AI system\\n5. Interact with third party + tools, models, or software designed to generate unlawful content or engage in + unlawful or harmful conduct and/or represent that the outputs of such tools, + models, or software are associated with Meta or Llama 3.2\\n\\nWith respect + to any multimodal models included in Llama 3.2, the rights granted under Section + 1(a) of the Llama 3.2 Community License Agreement are not being granted to you + if you are an individual domiciled in, or a company with a principal place of + business in, the European Union. 
This restriction does not apply to end users + of a product or service that incorporates any such multimodal models.\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation of this Policy through one of the following means:\\n\\n\\n\\n* + Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://l.workplace.com/l.php?u=https%3A%2F%2Fgithub.com%2Fmeta-llama%2Fllama-models%2Fissues\\u0026h=AT0qV8W9BFT6NwihiOHRuKYQM_UnkzN_NmHMy91OT55gkLpgi4kQupHUl0ssR4dQsIQ8n3tfd0vtkobvsEvt1l4Ic6GXI2EeuHV8N08OG2WnbAmm0FL4ObkazC6G_256vN0lN9DsykCvCqGZ)\\n* + Reporting risky content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback)\\n* + Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info)\\n* + Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama + 3.2: LlamaUseReport@meta.com\",\"modelfile\":\"# Modelfile generated by \\\"ollama + show\\\"\\n# To build a new Modelfile based on this, replace FROM with:\\n# + FROM llama3.2:3b\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-dde5aa3fc5ffc17176b5e8bdc82f587b24b2678c6c66101bf7da77af9f7ccdff\\nTEMPLATE + \\\"\\\"\\\"\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\nCutting + Knowledge Date: December 2023\\n\\n{{ if .System }}{{ .System }}\\n{{- end }}\\n{{- + if .Tools }}When you receive a tool call response, use the output to format + an answer to the orginal user question.\\n\\nYou are a helpful assistant with + tool calling capabilities.\\n{{- end }}\\u003c|eot_id|\\u003e\\n{{- range $i, + $_ := .Messages }}\\n{{- $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- + if eq .Role \\\"user\\\" }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- + if and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ range + $.Tools }}\\n{{- . 
}}\\n{{ end }}\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- + else }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- end }}{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n{{ range .ToolCalls }}\\n{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}\\n{{- end }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\\"\\\"\\\"\\nPARAMETER stop \\u003c|start_header_id|\\u003e\\nPARAMETER + stop \\u003c|end_header_id|\\u003e\\nPARAMETER stop \\u003c|eot_id|\\u003e\\nLICENSE + \\\"LLAMA 3.2 COMMUNITY LICENSE AGREEMENT\\nLlama 3.2 Version Release Date: + September 25, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution \\nand modification of the Llama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.2\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are \\nentering into this Agreement on such person or entity\u2019s + behalf), of the age required under\\napplicable laws, rules or regulations to + provide legal consent and that has legal authority\\nto bind your employer or + such other person or entity if you are entering in this Agreement\\non their + behalf.\\n\\n\u201CLlama 3.2\u201D means the foundational large language models + and software and algorithms, including\\nmachine-learning model code, trained + model weights, inference-enabling code, training-enabling code,\\nfine-tuning + enabling code and other elements of the foregoing distributed by Meta at \\nhttps://www.llama.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.2 and Documentation + (and \\nany portion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, \\nif you are an entity, your principal place of business is in the EEA + or Switzerland) \\nand Meta Platforms, Inc. (if you are located outside of the + EEA or Switzerland). \\n\\n\\nBy clicking \u201CI Accept\u201D below or by using + or distributing any portion or element of the Llama Materials,\\nyou agree to + be bound by this Agreement.\\n\\n\\n1. License Rights and Redistribution.\\n\\n + \ a. Grant of Rights. You are granted a non-exclusive, worldwide, \\nnon-transferable + and royalty-free limited license under Meta\u2019s intellectual property or + other rights \\nowned by Meta embodied in the Llama Materials to use, reproduce, + distribute, copy, create derivative works \\nof, and make modifications to the + Llama Materials. \\n\\n b. Redistribution and Use. \\n\\n i. 
If + you distribute or make available the Llama Materials (or any derivative works + thereof), \\nor a product or service (including another AI model) that contains + any of them, you shall (A) provide\\na copy of this Agreement with any such + Llama Materials; and (B) prominently display \u201CBuilt with Llama\u201D\\non + a related website, user interface, blogpost, about page, or product documentation. + If you use the\\nLlama Materials or any outputs or results of the Llama Materials + to create, train, fine tune, or\\notherwise improve an AI model, which is distributed + or made available, you shall also include \u201CLlama\u201D\\nat the beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, + or any derivative works thereof, from a Licensee as part\\nof an integrated + end user product, then Section 2 of this Agreement will not apply to you. \\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the \\nfollowing attribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \\n\u201CLlama 3.2 is licensed under the Llama 3.2 + Community License, Copyright \xA9 Meta Platforms,\\nInc. All Rights Reserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws + and regulations\\n(including trade compliance laws and regulations) and adhere + to the Acceptable Use Policy for\\nthe Llama Materials (available at https://www.llama.com/llama3_2/use-policy), + which is hereby \\nincorporated by reference into this Agreement.\\n \\n2. + Additional Commercial Terms. If, on the Llama 3.2 version release date, the + monthly active users\\nof the products or services made available by or for + Licensee, or Licensee\u2019s affiliates, \\nis greater than 700 million monthly + active users in the preceding calendar month, you must request \\na license + from Meta, which Meta may grant to you in its sole discretion, and you are not + authorized to\\nexercise any of the rights under this Agreement unless or until + Meta otherwise expressly grants you such rights.\\n\\n3. Disclaimer of Warranty. + UNLESS REQUIRED BY APPLICABLE LAW, THE LLAMA MATERIALS AND ANY OUTPUT AND \\nRESULTS + THEREFROM ARE PROVIDED ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF + ANY KIND, AND META DISCLAIMS\\nALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND + IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\\nOF TITLE, NON-INFRINGEMENT, + MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE\\nFOR + DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE LLAMA MATERIALS + AND ASSUME ANY RISKS ASSOCIATED\\nWITH YOUR USE OF THE LLAMA MATERIALS AND ANY + OUTPUT AND RESULTS.\\n\\n4. Limitation of Liability. IN NO EVENT WILL META OR + ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, \\nWHETHER IN CONTRACT, + TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, + \\nFOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, + EXEMPLARY OR PUNITIVE DAMAGES, EVEN \\nIF META OR ITS AFFILIATES HAVE BEEN ADVISED + OF THE POSSIBILITY OF ANY OF THE FOREGOING.\\n\\n5. Intellectual Property.\\n\\n + \ a. 
No trademark licenses are granted under this Agreement, and in connection + with the Llama Materials, \\nneither Meta nor Licensee may use any name or mark + owned by or associated with the other or any of its affiliates, \\nexcept as + required for reasonable and customary use in describing and redistributing the + Llama Materials or as \\nset forth in this Section 5(a). Meta hereby grants + you a license to use \u201CLlama\u201D (the \u201CMark\u201D) solely as required + \\nto comply with the last sentence of Section 1.b.i. You will comply with Meta\u2019s + brand guidelines (currently accessible \\nat https://about.meta.com/brand/resources/meta/company-brand/). + All goodwill arising out of your use of the Mark \\nwill inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and + derivatives made by or for Meta, with respect to any\\n derivative works + and modifications of the Llama Materials that are made by you, as between you + and Meta,\\n you are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any + entity (including a cross-claim or\\n counterclaim in a lawsuit) alleging + that the Llama Materials or Llama 3.2 outputs or results, or any portion\\n + \ of any of the foregoing, constitutes infringement of intellectual property + or other rights owned or licensable\\n by you, then any licenses granted + to you under this Agreement shall terminate as of the date such litigation or\\n + \ claim is filed or instituted. You will indemnify and hold harmless Meta + from and against any claim by any third\\n party arising out of or related + to your use or distribution of the Llama Materials.\\n\\n6. Term and Termination. + The term of this Agreement will commence upon your acceptance of this Agreement + or access\\nto the Llama Materials and will continue in full force and effect + until terminated in accordance with the terms\\nand conditions herein. Meta + may terminate this Agreement if you are in breach of any term or condition of + this\\nAgreement. Upon termination of this Agreement, you shall delete and cease + use of the Llama Materials. Sections 3,\\n4 and 7 shall survive the termination + of this Agreement. \\n\\n7. Governing Law and Jurisdiction. This Agreement will + be governed and construed under the laws of the State of \\nCalifornia without + regard to choice of law principles, and the UN Convention on Contracts for the + International\\nSale of Goods does not apply to this Agreement. The courts of + California shall have exclusive jurisdiction of\\nany dispute arising out of + this Agreement.\\\"\\nLICENSE \\\"**Llama 3.2** **Acceptable Use Policy**\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.2. If you access or use Llama 3.2, you agree to this Acceptable Use + Policy (\u201C**Policy**\u201D). The most recent copy of this policy can be + found at [https://www.llama.com/llama3_2/use-policy](https://www.llama.com/llama3_2/use-policy).\\n\\n**Prohibited + Uses**\\n\\nWe want everyone to use Llama 3.2 safely and responsibly. You agree + you will not use, or allow others to use, Llama 3.2 to:\\n\\n\\n\\n1. Violate + the law or others\u2019 rights, including to:\\n 1. Engage in, promote, generate, + contribute to, encourage, plan, incite, or further illegal or unlawful activity + or content, such as:\\n 1. Violence or terrorism\\n 2. 
Exploitation + or harm to children, including the solicitation, creation, acquisition, or dissemination + of child exploitative content or failure to report Child Sexual Abuse Material\\n + \ 3. Human trafficking, exploitation, and sexual violence\\n 4. + The illegal distribution of information or materials to minors, including obscene + materials, or failure to employ legally required age-gating in connection with + such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 1. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 2. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 3. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 4. Collect, process, disclose, generate, + or infer private or sensitive information about individuals, including information + about individuals\u2019 identity, health, or demographic information, unless + you have obtained the right to do so in accordance with applicable law\\n 5. + Engage in or facilitate any action or generate any content that infringes, misappropriates, + or otherwise violates any third-party rights, including the outputs or results + of any products or services using the Llama Materials\\n 6. Create, generate, + or facilitate the creation of malicious code, malware, computer viruses or do + anything else that could disable, overburden, interfere with or impair the proper + working, integrity, operation or appearance of a website or computer system\\n + \ 7. Engage in any action, or facilitate any action, to intentionally circumvent + or remove usage restrictions or other safety measures, or to enable functionality + disabled by Meta\\n2. Engage in, promote, incite, facilitate, or assist in the + planning or development of activities that present a risk of death or bodily + harm to individuals, including use of Llama 3.2 related to the following:\\n + \ 8. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State or to + the U.S. Biological Weapons Anti-Terrorism Act of 1989 or the Chemical Weapons + Convention Implementation Act of 1997\\n 9. Guns and illegal weapons (including + weapon development)\\n 10. Illegal drugs and regulated/controlled substances\\n + \ 11. Operation of critical infrastructure, transportation technologies, or + heavy machinery\\n 12. Self-harm or harm to others, including suicide, cutting, + and eating disorders\\n 13. Any content intended to incite or promote violence, + abuse, or any infliction of bodily harm to an individual\\n3. Intentionally + deceive or mislead others, including use of Llama 3.2 related to the following:\\n + \ 14. Generating, promoting, or furthering fraud or the creation or promotion + of disinformation\\n 15. Generating, promoting, or furthering defamatory + content, including the creation of defamatory statements, images, or other content\\n + \ 16. Generating, promoting, or further distributing spam\\n 17. Impersonating + another individual without consent, authorization, or legal right\\n 18. 
+ Representing that the use of Llama 3.2 or outputs are human-generated\\n 19. + Generating or facilitating false online engagement, including fake reviews and + other means of fake online engagement\\n4. Fail to appropriately disclose to + end users any known dangers of your AI system\\n5. Interact with third party + tools, models, or software designed to generate unlawful content or engage in + unlawful or harmful conduct and/or represent that the outputs of such tools, + models, or software are associated with Meta or Llama 3.2\\n\\nWith respect + to any multimodal models included in Llama 3.2, the rights granted under Section + 1(a) of the Llama 3.2 Community License Agreement are not being granted to you + if you are an individual domiciled in, or a company with a principal place of + business in, the European Union. This restriction does not apply to end users + of a product or service that incorporates any such multimodal models.\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation of this Policy through one of the following means:\\n\\n\\n\\n* + Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://l.workplace.com/l.php?u=https%3A%2F%2Fgithub.com%2Fmeta-llama%2Fllama-models%2Fissues\\u0026h=AT0qV8W9BFT6NwihiOHRuKYQM_UnkzN_NmHMy91OT55gkLpgi4kQupHUl0ssR4dQsIQ8n3tfd0vtkobvsEvt1l4Ic6GXI2EeuHV8N08OG2WnbAmm0FL4ObkazC6G_256vN0lN9DsykCvCqGZ)\\n* + Reporting risky content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback)\\n* + Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info)\\n* + Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama + 3.2: LlamaUseReport@meta.com\\\"\\n\",\"parameters\":\"stop \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\nCutting + Knowledge Date: December 2023\\n\\n{{ if .System }}{{ .System }}\\n{{- end }}\\n{{- + if .Tools }}When you receive a tool call response, use the output to format + an answer to the orginal user question.\\n\\nYou are a helpful assistant with + tool calling capabilities.\\n{{- end }}\\u003c|eot_id|\\u003e\\n{{- range $i, + $_ := .Messages }}\\n{{- $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- + if eq .Role \\\"user\\\" }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- + if and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ range + $.Tools }}\\n{{- . 
}}\\n{{ end }}\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- + else }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- end }}{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n{{ range .ToolCalls }}\\n{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}\\n{{- end }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"3.2B\",\"quantization_level\":\"Q4_K_M\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Llama-3.2\",\"general.file_type\":15,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.parameter_count\":3212749888,\"general.quantization_version\":2,\"general.size_label\":\"3B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":24,\"llama.attention.head_count_kv\":8,\"llama.attention.key_length\":128,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.attention.value_length\":128,\"llama.block_count\":28,\"llama.context_length\":131072,\"llama.embedding_length\":3072,\"llama.feed_forward_length\":8192,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-12-31T11:53:14.529771974-05:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:12 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"name": "llama3.2:3b"}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '23' + content-type: + - application/json + host: + - localhost:11434 + user-agent: + - litellm/1.57.4 + method: POST + uri: http://localhost:11434/api/show + response: + content: "{\"license\":\"LLAMA 3.2 COMMUNITY LICENSE AGREEMENT\\nLlama 3.2 Version + Release Date: September 25, 2024\\n\\n\u201CAgreement\u201D means the terms + and conditions for use, reproduction, distribution \\nand modification of the + Llama Materials set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, + manuals and documentation accompanying Llama 3.2\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are \\nentering into this Agreement on such person or entity\u2019s + behalf), of the age required under\\napplicable laws, rules or regulations to + provide legal consent and that has legal authority\\nto bind your employer or + such other person or entity if you are 
entering in this Agreement\\non their + behalf.\\n\\n\u201CLlama 3.2\u201D means the foundational large language models + and software and algorithms, including\\nmachine-learning model code, trained + model weights, inference-enabling code, training-enabling code,\\nfine-tuning + enabling code and other elements of the foregoing distributed by Meta at \\nhttps://www.llama.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.2 and Documentation + (and \\nany portion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, \\nif you are an entity, your principal place of business is in the EEA + or Switzerland) \\nand Meta Platforms, Inc. (if you are located outside of the + EEA or Switzerland). \\n\\n\\nBy clicking \u201CI Accept\u201D below or by using + or distributing any portion or element of the Llama Materials,\\nyou agree to + be bound by this Agreement.\\n\\n\\n1. License Rights and Redistribution.\\n\\n + \ a. Grant of Rights. You are granted a non-exclusive, worldwide, \\nnon-transferable + and royalty-free limited license under Meta\u2019s intellectual property or + other rights \\nowned by Meta embodied in the Llama Materials to use, reproduce, + distribute, copy, create derivative works \\nof, and make modifications to the + Llama Materials. \\n\\n b. Redistribution and Use. \\n\\n i. If + you distribute or make available the Llama Materials (or any derivative works + thereof), \\nor a product or service (including another AI model) that contains + any of them, you shall (A) provide\\na copy of this Agreement with any such + Llama Materials; and (B) prominently display \u201CBuilt with Llama\u201D\\non + a related website, user interface, blogpost, about page, or product documentation. + If you use the\\nLlama Materials or any outputs or results of the Llama Materials + to create, train, fine tune, or\\notherwise improve an AI model, which is distributed + or made available, you shall also include \u201CLlama\u201D\\nat the beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, + or any derivative works thereof, from a Licensee as part\\nof an integrated + end user product, then Section 2 of this Agreement will not apply to you. \\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the \\nfollowing attribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \\n\u201CLlama 3.2 is licensed under the Llama 3.2 + Community License, Copyright \xA9 Meta Platforms,\\nInc. All Rights Reserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws + and regulations\\n(including trade compliance laws and regulations) and adhere + to the Acceptable Use Policy for\\nthe Llama Materials (available at https://www.llama.com/llama3_2/use-policy), + which is hereby \\nincorporated by reference into this Agreement.\\n \\n2. + Additional Commercial Terms. 
If, on the Llama 3.2 version release date, the + monthly active users\\nof the products or services made available by or for + Licensee, or Licensee\u2019s affiliates, \\nis greater than 700 million monthly + active users in the preceding calendar month, you must request \\na license + from Meta, which Meta may grant to you in its sole discretion, and you are not + authorized to\\nexercise any of the rights under this Agreement unless or until + Meta otherwise expressly grants you such rights.\\n\\n3. Disclaimer of Warranty. + UNLESS REQUIRED BY APPLICABLE LAW, THE LLAMA MATERIALS AND ANY OUTPUT AND \\nRESULTS + THEREFROM ARE PROVIDED ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF + ANY KIND, AND META DISCLAIMS\\nALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND + IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\\nOF TITLE, NON-INFRINGEMENT, + MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE\\nFOR + DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE LLAMA MATERIALS + AND ASSUME ANY RISKS ASSOCIATED\\nWITH YOUR USE OF THE LLAMA MATERIALS AND ANY + OUTPUT AND RESULTS.\\n\\n4. Limitation of Liability. IN NO EVENT WILL META OR + ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, \\nWHETHER IN CONTRACT, + TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, + \\nFOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, + EXEMPLARY OR PUNITIVE DAMAGES, EVEN \\nIF META OR ITS AFFILIATES HAVE BEEN ADVISED + OF THE POSSIBILITY OF ANY OF THE FOREGOING.\\n\\n5. Intellectual Property.\\n\\n + \ a. No trademark licenses are granted under this Agreement, and in connection + with the Llama Materials, \\nneither Meta nor Licensee may use any name or mark + owned by or associated with the other or any of its affiliates, \\nexcept as + required for reasonable and customary use in describing and redistributing the + Llama Materials or as \\nset forth in this Section 5(a). Meta hereby grants + you a license to use \u201CLlama\u201D (the \u201CMark\u201D) solely as required + \\nto comply with the last sentence of Section 1.b.i. You will comply with Meta\u2019s + brand guidelines (currently accessible \\nat https://about.meta.com/brand/resources/meta/company-brand/). + All goodwill arising out of your use of the Mark \\nwill inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and + derivatives made by or for Meta, with respect to any\\n derivative works + and modifications of the Llama Materials that are made by you, as between you + and Meta,\\n you are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any + entity (including a cross-claim or\\n counterclaim in a lawsuit) alleging + that the Llama Materials or Llama 3.2 outputs or results, or any portion\\n + \ of any of the foregoing, constitutes infringement of intellectual property + or other rights owned or licensable\\n by you, then any licenses granted + to you under this Agreement shall terminate as of the date such litigation or\\n + \ claim is filed or instituted. You will indemnify and hold harmless Meta + from and against any claim by any third\\n party arising out of or related + to your use or distribution of the Llama Materials.\\n\\n6. Term and Termination. 
+ The term of this Agreement will commence upon your acceptance of this Agreement + or access\\nto the Llama Materials and will continue in full force and effect + until terminated in accordance with the terms\\nand conditions herein. Meta + may terminate this Agreement if you are in breach of any term or condition of + this\\nAgreement. Upon termination of this Agreement, you shall delete and cease + use of the Llama Materials. Sections 3,\\n4 and 7 shall survive the termination + of this Agreement. \\n\\n7. Governing Law and Jurisdiction. This Agreement will + be governed and construed under the laws of the State of \\nCalifornia without + regard to choice of law principles, and the UN Convention on Contracts for the + International\\nSale of Goods does not apply to this Agreement. The courts of + California shall have exclusive jurisdiction of\\nany dispute arising out of + this Agreement.\\n**Llama 3.2** **Acceptable Use Policy**\\n\\nMeta is committed + to promoting safe and fair use of its tools and features, including Llama 3.2. + If you access or use Llama 3.2, you agree to this Acceptable Use Policy (\u201C**Policy**\u201D). + The most recent copy of this policy can be found at [https://www.llama.com/llama3_2/use-policy](https://www.llama.com/llama3_2/use-policy).\\n\\n**Prohibited + Uses**\\n\\nWe want everyone to use Llama 3.2 safely and responsibly. You agree + you will not use, or allow others to use, Llama 3.2 to:\\n\\n\\n\\n1. Violate + the law or others\u2019 rights, including to:\\n 1. Engage in, promote, generate, + contribute to, encourage, plan, incite, or further illegal or unlawful activity + or content, such as:\\n 1. Violence or terrorism\\n 2. Exploitation + or harm to children, including the solicitation, creation, acquisition, or dissemination + of child exploitative content or failure to report Child Sexual Abuse Material\\n + \ 3. Human trafficking, exploitation, and sexual violence\\n 4. + The illegal distribution of information or materials to minors, including obscene + materials, or failure to employ legally required age-gating in connection with + such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 1. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 2. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 3. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 4. Collect, process, disclose, generate, + or infer private or sensitive information about individuals, including information + about individuals\u2019 identity, health, or demographic information, unless + you have obtained the right to do so in accordance with applicable law\\n 5. + Engage in or facilitate any action or generate any content that infringes, misappropriates, + or otherwise violates any third-party rights, including the outputs or results + of any products or services using the Llama Materials\\n 6. 
Create, generate, + or facilitate the creation of malicious code, malware, computer viruses or do + anything else that could disable, overburden, interfere with or impair the proper + working, integrity, operation or appearance of a website or computer system\\n + \ 7. Engage in any action, or facilitate any action, to intentionally circumvent + or remove usage restrictions or other safety measures, or to enable functionality + disabled by Meta\\n2. Engage in, promote, incite, facilitate, or assist in the + planning or development of activities that present a risk of death or bodily + harm to individuals, including use of Llama 3.2 related to the following:\\n + \ 8. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State or to + the U.S. Biological Weapons Anti-Terrorism Act of 1989 or the Chemical Weapons + Convention Implementation Act of 1997\\n 9. Guns and illegal weapons (including + weapon development)\\n 10. Illegal drugs and regulated/controlled substances\\n + \ 11. Operation of critical infrastructure, transportation technologies, or + heavy machinery\\n 12. Self-harm or harm to others, including suicide, cutting, + and eating disorders\\n 13. Any content intended to incite or promote violence, + abuse, or any infliction of bodily harm to an individual\\n3. Intentionally + deceive or mislead others, including use of Llama 3.2 related to the following:\\n + \ 14. Generating, promoting, or furthering fraud or the creation or promotion + of disinformation\\n 15. Generating, promoting, or furthering defamatory + content, including the creation of defamatory statements, images, or other content\\n + \ 16. Generating, promoting, or further distributing spam\\n 17. Impersonating + another individual without consent, authorization, or legal right\\n 18. + Representing that the use of Llama 3.2 or outputs are human-generated\\n 19. + Generating or facilitating false online engagement, including fake reviews and + other means of fake online engagement\\n4. Fail to appropriately disclose to + end users any known dangers of your AI system\\n5. Interact with third party + tools, models, or software designed to generate unlawful content or engage in + unlawful or harmful conduct and/or represent that the outputs of such tools, + models, or software are associated with Meta or Llama 3.2\\n\\nWith respect + to any multimodal models included in Llama 3.2, the rights granted under Section + 1(a) of the Llama 3.2 Community License Agreement are not being granted to you + if you are an individual domiciled in, or a company with a principal place of + business in, the European Union. 
This restriction does not apply to end users + of a product or service that incorporates any such multimodal models.\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation of this Policy through one of the following means:\\n\\n\\n\\n* + Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://l.workplace.com/l.php?u=https%3A%2F%2Fgithub.com%2Fmeta-llama%2Fllama-models%2Fissues\\u0026h=AT0qV8W9BFT6NwihiOHRuKYQM_UnkzN_NmHMy91OT55gkLpgi4kQupHUl0ssR4dQsIQ8n3tfd0vtkobvsEvt1l4Ic6GXI2EeuHV8N08OG2WnbAmm0FL4ObkazC6G_256vN0lN9DsykCvCqGZ)\\n* + Reporting risky content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback)\\n* + Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info)\\n* + Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama + 3.2: LlamaUseReport@meta.com\",\"modelfile\":\"# Modelfile generated by \\\"ollama + show\\\"\\n# To build a new Modelfile based on this, replace FROM with:\\n# + FROM llama3.2:3b\\n\\nFROM /Users/brandonhancock/.ollama/models/blobs/sha256-dde5aa3fc5ffc17176b5e8bdc82f587b24b2678c6c66101bf7da77af9f7ccdff\\nTEMPLATE + \\\"\\\"\\\"\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\nCutting + Knowledge Date: December 2023\\n\\n{{ if .System }}{{ .System }}\\n{{- end }}\\n{{- + if .Tools }}When you receive a tool call response, use the output to format + an answer to the orginal user question.\\n\\nYou are a helpful assistant with + tool calling capabilities.\\n{{- end }}\\u003c|eot_id|\\u003e\\n{{- range $i, + $_ := .Messages }}\\n{{- $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- + if eq .Role \\\"user\\\" }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- + if and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ range + $.Tools }}\\n{{- . 
}}\\n{{ end }}\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- + else }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- end }}{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n{{ range .ToolCalls }}\\n{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}\\n{{- end }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\\\"\\\"\\\"\\nPARAMETER stop \\u003c|start_header_id|\\u003e\\nPARAMETER + stop \\u003c|end_header_id|\\u003e\\nPARAMETER stop \\u003c|eot_id|\\u003e\\nLICENSE + \\\"LLAMA 3.2 COMMUNITY LICENSE AGREEMENT\\nLlama 3.2 Version Release Date: + September 25, 2024\\n\\n\u201CAgreement\u201D means the terms and conditions + for use, reproduction, distribution \\nand modification of the Llama Materials + set forth herein.\\n\\n\u201CDocumentation\u201D means the specifications, manuals + and documentation accompanying Llama 3.2\\ndistributed by Meta at https://llama.meta.com/doc/overview.\\n\\n\u201CLicensee\u201D + or \u201Cyou\u201D means you, or your employer or any other person or entity + (if you are \\nentering into this Agreement on such person or entity\u2019s + behalf), of the age required under\\napplicable laws, rules or regulations to + provide legal consent and that has legal authority\\nto bind your employer or + such other person or entity if you are entering in this Agreement\\non their + behalf.\\n\\n\u201CLlama 3.2\u201D means the foundational large language models + and software and algorithms, including\\nmachine-learning model code, trained + model weights, inference-enabling code, training-enabling code,\\nfine-tuning + enabling code and other elements of the foregoing distributed by Meta at \\nhttps://www.llama.com/llama-downloads.\\n\\n\u201CLlama + Materials\u201D means, collectively, Meta\u2019s proprietary Llama 3.2 and Documentation + (and \\nany portion thereof) made available under this Agreement.\\n\\n\u201CMeta\u201D + or \u201Cwe\u201D means Meta Platforms Ireland Limited (if you are located in + or, \\nif you are an entity, your principal place of business is in the EEA + or Switzerland) \\nand Meta Platforms, Inc. (if you are located outside of the + EEA or Switzerland). \\n\\n\\nBy clicking \u201CI Accept\u201D below or by using + or distributing any portion or element of the Llama Materials,\\nyou agree to + be bound by this Agreement.\\n\\n\\n1. License Rights and Redistribution.\\n\\n + \ a. Grant of Rights. You are granted a non-exclusive, worldwide, \\nnon-transferable + and royalty-free limited license under Meta\u2019s intellectual property or + other rights \\nowned by Meta embodied in the Llama Materials to use, reproduce, + distribute, copy, create derivative works \\nof, and make modifications to the + Llama Materials. \\n\\n b. Redistribution and Use. \\n\\n i. 
If + you distribute or make available the Llama Materials (or any derivative works + thereof), \\nor a product or service (including another AI model) that contains + any of them, you shall (A) provide\\na copy of this Agreement with any such + Llama Materials; and (B) prominently display \u201CBuilt with Llama\u201D\\non + a related website, user interface, blogpost, about page, or product documentation. + If you use the\\nLlama Materials or any outputs or results of the Llama Materials + to create, train, fine tune, or\\notherwise improve an AI model, which is distributed + or made available, you shall also include \u201CLlama\u201D\\nat the beginning + of any such AI model name.\\n\\n ii. If you receive Llama Materials, + or any derivative works thereof, from a Licensee as part\\nof an integrated + end user product, then Section 2 of this Agreement will not apply to you. \\n\\n + \ iii. You must retain in all copies of the Llama Materials that you distribute + the \\nfollowing attribution notice within a \u201CNotice\u201D text file distributed + as a part of such copies: \\n\u201CLlama 3.2 is licensed under the Llama 3.2 + Community License, Copyright \xA9 Meta Platforms,\\nInc. All Rights Reserved.\u201D\\n\\n + \ iv. Your use of the Llama Materials must comply with applicable laws + and regulations\\n(including trade compliance laws and regulations) and adhere + to the Acceptable Use Policy for\\nthe Llama Materials (available at https://www.llama.com/llama3_2/use-policy), + which is hereby \\nincorporated by reference into this Agreement.\\n \\n2. + Additional Commercial Terms. If, on the Llama 3.2 version release date, the + monthly active users\\nof the products or services made available by or for + Licensee, or Licensee\u2019s affiliates, \\nis greater than 700 million monthly + active users in the preceding calendar month, you must request \\na license + from Meta, which Meta may grant to you in its sole discretion, and you are not + authorized to\\nexercise any of the rights under this Agreement unless or until + Meta otherwise expressly grants you such rights.\\n\\n3. Disclaimer of Warranty. + UNLESS REQUIRED BY APPLICABLE LAW, THE LLAMA MATERIALS AND ANY OUTPUT AND \\nRESULTS + THEREFROM ARE PROVIDED ON AN \u201CAS IS\u201D BASIS, WITHOUT WARRANTIES OF + ANY KIND, AND META DISCLAIMS\\nALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND + IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\\nOF TITLE, NON-INFRINGEMENT, + MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE\\nFOR + DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE LLAMA MATERIALS + AND ASSUME ANY RISKS ASSOCIATED\\nWITH YOUR USE OF THE LLAMA MATERIALS AND ANY + OUTPUT AND RESULTS.\\n\\n4. Limitation of Liability. IN NO EVENT WILL META OR + ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, \\nWHETHER IN CONTRACT, + TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, + \\nFOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, + EXEMPLARY OR PUNITIVE DAMAGES, EVEN \\nIF META OR ITS AFFILIATES HAVE BEEN ADVISED + OF THE POSSIBILITY OF ANY OF THE FOREGOING.\\n\\n5. Intellectual Property.\\n\\n + \ a. 
No trademark licenses are granted under this Agreement, and in connection + with the Llama Materials, \\nneither Meta nor Licensee may use any name or mark + owned by or associated with the other or any of its affiliates, \\nexcept as + required for reasonable and customary use in describing and redistributing the + Llama Materials or as \\nset forth in this Section 5(a). Meta hereby grants + you a license to use \u201CLlama\u201D (the \u201CMark\u201D) solely as required + \\nto comply with the last sentence of Section 1.b.i. You will comply with Meta\u2019s + brand guidelines (currently accessible \\nat https://about.meta.com/brand/resources/meta/company-brand/). + All goodwill arising out of your use of the Mark \\nwill inure to the benefit + of Meta.\\n\\n b. Subject to Meta\u2019s ownership of Llama Materials and + derivatives made by or for Meta, with respect to any\\n derivative works + and modifications of the Llama Materials that are made by you, as between you + and Meta,\\n you are and will be the owner of such derivative works and modifications.\\n\\n + \ c. If you institute litigation or other proceedings against Meta or any + entity (including a cross-claim or\\n counterclaim in a lawsuit) alleging + that the Llama Materials or Llama 3.2 outputs or results, or any portion\\n + \ of any of the foregoing, constitutes infringement of intellectual property + or other rights owned or licensable\\n by you, then any licenses granted + to you under this Agreement shall terminate as of the date such litigation or\\n + \ claim is filed or instituted. You will indemnify and hold harmless Meta + from and against any claim by any third\\n party arising out of or related + to your use or distribution of the Llama Materials.\\n\\n6. Term and Termination. + The term of this Agreement will commence upon your acceptance of this Agreement + or access\\nto the Llama Materials and will continue in full force and effect + until terminated in accordance with the terms\\nand conditions herein. Meta + may terminate this Agreement if you are in breach of any term or condition of + this\\nAgreement. Upon termination of this Agreement, you shall delete and cease + use of the Llama Materials. Sections 3,\\n4 and 7 shall survive the termination + of this Agreement. \\n\\n7. Governing Law and Jurisdiction. This Agreement will + be governed and construed under the laws of the State of \\nCalifornia without + regard to choice of law principles, and the UN Convention on Contracts for the + International\\nSale of Goods does not apply to this Agreement. The courts of + California shall have exclusive jurisdiction of\\nany dispute arising out of + this Agreement.\\\"\\nLICENSE \\\"**Llama 3.2** **Acceptable Use Policy**\\n\\nMeta + is committed to promoting safe and fair use of its tools and features, including + Llama 3.2. If you access or use Llama 3.2, you agree to this Acceptable Use + Policy (\u201C**Policy**\u201D). The most recent copy of this policy can be + found at [https://www.llama.com/llama3_2/use-policy](https://www.llama.com/llama3_2/use-policy).\\n\\n**Prohibited + Uses**\\n\\nWe want everyone to use Llama 3.2 safely and responsibly. You agree + you will not use, or allow others to use, Llama 3.2 to:\\n\\n\\n\\n1. Violate + the law or others\u2019 rights, including to:\\n 1. Engage in, promote, generate, + contribute to, encourage, plan, incite, or further illegal or unlawful activity + or content, such as:\\n 1. Violence or terrorism\\n 2. 
Exploitation + or harm to children, including the solicitation, creation, acquisition, or dissemination + of child exploitative content or failure to report Child Sexual Abuse Material\\n + \ 3. Human trafficking, exploitation, and sexual violence\\n 4. + The illegal distribution of information or materials to minors, including obscene + materials, or failure to employ legally required age-gating in connection with + such information or materials.\\n 5. Sexual solicitation\\n 6. + Any other criminal activity\\n 1. Engage in, promote, incite, or facilitate + the harassment, abuse, threatening, or bullying of individuals or groups of + individuals\\n 2. Engage in, promote, incite, or facilitate discrimination + or other unlawful or harmful conduct in the provision of employment, employment + benefits, credit, housing, other economic benefits, or other essential goods + and services\\n 3. Engage in the unauthorized or unlicensed practice of any + profession including, but not limited to, financial, legal, medical/health, + or related professional practices\\n 4. Collect, process, disclose, generate, + or infer private or sensitive information about individuals, including information + about individuals\u2019 identity, health, or demographic information, unless + you have obtained the right to do so in accordance with applicable law\\n 5. + Engage in or facilitate any action or generate any content that infringes, misappropriates, + or otherwise violates any third-party rights, including the outputs or results + of any products or services using the Llama Materials\\n 6. Create, generate, + or facilitate the creation of malicious code, malware, computer viruses or do + anything else that could disable, overburden, interfere with or impair the proper + working, integrity, operation or appearance of a website or computer system\\n + \ 7. Engage in any action, or facilitate any action, to intentionally circumvent + or remove usage restrictions or other safety measures, or to enable functionality + disabled by Meta\\n2. Engage in, promote, incite, facilitate, or assist in the + planning or development of activities that present a risk of death or bodily + harm to individuals, including use of Llama 3.2 related to the following:\\n + \ 8. Military, warfare, nuclear industries or applications, espionage, use + for materials or activities that are subject to the International Traffic Arms + Regulations (ITAR) maintained by the United States Department of State or to + the U.S. Biological Weapons Anti-Terrorism Act of 1989 or the Chemical Weapons + Convention Implementation Act of 1997\\n 9. Guns and illegal weapons (including + weapon development)\\n 10. Illegal drugs and regulated/controlled substances\\n + \ 11. Operation of critical infrastructure, transportation technologies, or + heavy machinery\\n 12. Self-harm or harm to others, including suicide, cutting, + and eating disorders\\n 13. Any content intended to incite or promote violence, + abuse, or any infliction of bodily harm to an individual\\n3. Intentionally + deceive or mislead others, including use of Llama 3.2 related to the following:\\n + \ 14. Generating, promoting, or furthering fraud or the creation or promotion + of disinformation\\n 15. Generating, promoting, or furthering defamatory + content, including the creation of defamatory statements, images, or other content\\n + \ 16. Generating, promoting, or further distributing spam\\n 17. Impersonating + another individual without consent, authorization, or legal right\\n 18. 
+ Representing that the use of Llama 3.2 or outputs are human-generated\\n 19. + Generating or facilitating false online engagement, including fake reviews and + other means of fake online engagement\\n4. Fail to appropriately disclose to + end users any known dangers of your AI system\\n5. Interact with third party + tools, models, or software designed to generate unlawful content or engage in + unlawful or harmful conduct and/or represent that the outputs of such tools, + models, or software are associated with Meta or Llama 3.2\\n\\nWith respect + to any multimodal models included in Llama 3.2, the rights granted under Section + 1(a) of the Llama 3.2 Community License Agreement are not being granted to you + if you are an individual domiciled in, or a company with a principal place of + business in, the European Union. This restriction does not apply to end users + of a product or service that incorporates any such multimodal models.\\n\\nPlease + report any violation of this Policy, software \u201Cbug,\u201D or other problems + that could lead to a violation of this Policy through one of the following means:\\n\\n\\n\\n* + Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://l.workplace.com/l.php?u=https%3A%2F%2Fgithub.com%2Fmeta-llama%2Fllama-models%2Fissues\\u0026h=AT0qV8W9BFT6NwihiOHRuKYQM_UnkzN_NmHMy91OT55gkLpgi4kQupHUl0ssR4dQsIQ8n3tfd0vtkobvsEvt1l4Ic6GXI2EeuHV8N08OG2WnbAmm0FL4ObkazC6G_256vN0lN9DsykCvCqGZ)\\n* + Reporting risky content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback)\\n* + Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info)\\n* + Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama + 3.2: LlamaUseReport@meta.com\\\"\\n\",\"parameters\":\"stop \\\"\\u003c|start_header_id|\\u003e\\\"\\nstop + \ \\\"\\u003c|end_header_id|\\u003e\\\"\\nstop \\\"\\u003c|eot_id|\\u003e\\\"\",\"template\":\"\\u003c|start_header_id|\\u003esystem\\u003c|end_header_id|\\u003e\\n\\nCutting + Knowledge Date: December 2023\\n\\n{{ if .System }}{{ .System }}\\n{{- end }}\\n{{- + if .Tools }}When you receive a tool call response, use the output to format + an answer to the orginal user question.\\n\\nYou are a helpful assistant with + tool calling capabilities.\\n{{- end }}\\u003c|eot_id|\\u003e\\n{{- range $i, + $_ := .Messages }}\\n{{- $last := eq (len (slice $.Messages $i)) 1 }}\\n{{- + if eq .Role \\\"user\\\" }}\\u003c|start_header_id|\\u003euser\\u003c|end_header_id|\\u003e\\n{{- + if and $.Tools $last }}\\n\\nGiven the following functions, please respond with + a JSON for a function call with its proper arguments that best answers the given + prompt.\\n\\nRespond in the format {\\\"name\\\": function name, \\\"parameters\\\": + dictionary of argument name and its value}. Do not use variables.\\n\\n{{ range + $.Tools }}\\n{{- . 
}}\\n{{ end }}\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- + else }}\\n\\n{{ .Content }}\\u003c|eot_id|\\u003e\\n{{- end }}{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- else if eq .Role \\\"assistant\\\" }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n{{- + if .ToolCalls }}\\n{{ range .ToolCalls }}\\n{\\\"name\\\": \\\"{{ .Function.Name + }}\\\", \\\"parameters\\\": {{ .Function.Arguments }}}{{ end }}\\n{{- else }}\\n\\n{{ + .Content }}\\n{{- end }}{{ if not $last }}\\u003c|eot_id|\\u003e{{ end }}\\n{{- + else if eq .Role \\\"tool\\\" }}\\u003c|start_header_id|\\u003eipython\\u003c|end_header_id|\\u003e\\n\\n{{ + .Content }}\\u003c|eot_id|\\u003e{{ if $last }}\\u003c|start_header_id|\\u003eassistant\\u003c|end_header_id|\\u003e\\n\\n{{ + end }}\\n{{- end }}\\n{{- end }}\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":[\"llama\"],\"parameter_size\":\"3.2B\",\"quantization_level\":\"Q4_K_M\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.basename\":\"Llama-3.2\",\"general.file_type\":15,\"general.finetune\":\"Instruct\",\"general.languages\":[\"en\",\"de\",\"fr\",\"it\",\"pt\",\"hi\",\"es\",\"th\"],\"general.parameter_count\":3212749888,\"general.quantization_version\":2,\"general.size_label\":\"3B\",\"general.tags\":[\"facebook\",\"meta\",\"pytorch\",\"llama\",\"llama-3\",\"text-generation\"],\"general.type\":\"model\",\"llama.attention.head_count\":24,\"llama.attention.head_count_kv\":8,\"llama.attention.key_length\":128,\"llama.attention.layer_norm_rms_epsilon\":0.00001,\"llama.attention.value_length\":128,\"llama.block_count\":28,\"llama.context_length\":131072,\"llama.embedding_length\":3072,\"llama.feed_forward_length\":8192,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":500000,\"llama.vocab_size\":128256,\"tokenizer.ggml.bos_token_id\":128000,\"tokenizer.ggml.eos_token_id\":128009,\"tokenizer.ggml.merges\":null,\"tokenizer.ggml.model\":\"gpt2\",\"tokenizer.ggml.pre\":\"llama-bpe\",\"tokenizer.ggml.token_type\":null,\"tokenizer.ggml.tokens\":null},\"modified_at\":\"2024-12-31T11:53:14.529771974-05:00\"}" + headers: + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 Jan 2025 20:47:12 GMT + Transfer-Encoding: + - chunked + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/tests/utilities/cassettes/test_converter_with_nested_model.yaml b/tests/utilities/cassettes/test_converter_with_nested_model.yaml new file mode 100644 index 0000000000..b5f8e38e70 --- /dev/null +++ b/tests/utilities/cassettes/test_converter_with_nested_model.yaml @@ -0,0 +1,116 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": "Name: John Doe\nAge: 30\nAddress: + 123 Main St, Anytown, 12345"}], "model": "gpt-4o-mini", "tool_choice": {"type": + "function", "function": {"name": "Person"}}, "tools": [{"type": "function", + "function": {"name": "Person", "description": "Correctly extracted `Person` + with all the required parameters with correct types", "parameters": {"$defs": + {"Address": {"properties": {"street": {"title": "Street", "type": "string"}, + "city": {"title": "City", "type": "string"}, "zip_code": {"title": "Zip Code", + "type": "string"}}, "required": ["street", "city", "zip_code"], "title": "Address", + "type": "object"}}, "properties": {"name": {"title": "Name", "type": "string"}, + "age": {"title": "Age", "type": "integer"}, "address": {"$ref": "#/$defs/Address"}}, + "required": 
["address", "age", "name"], "type": "object"}}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '853' + content-type: + - application/json + cookie: + - __cf_bm=PzayZLF04c14veGc.0ocVg3VHBbpzKRW8Hqox8L9U7c-1736974028-1.0.1.1-mZpK8.SH9l7K2z8Tvt6z.dURiVPjFqEz7zYEITfRwdr5z0razsSebZGN9IRPmI5XC_w5rbZW2Kg6hh5cenXinQ; + _cfuvid=ciwC3n2Srn20xx4JhEUeN6Ap0tNBaE44S95nIilboQ0-1736974028496-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.59.6 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.59.6 + x-stainless-raw-response: + - 'true' + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.7 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + content: "{\n \"id\": \"chatcmpl-Aq4aFpbhU10QK0e6Jlkxy8AUxCZCf\",\n \"object\": + \"chat.completion\",\n \"created\": 1736974039,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n + \ \"id\": \"call_N29aoGL9tN0qL2O7HI8Op2so\",\n \"type\": + \"function\",\n \"function\": {\n \"name\": \"Person\",\n + \ \"arguments\": \"{\\\"name\\\":\\\"John Doe\\\",\\\"age\\\":30,\\\"address\\\":{\\\"street\\\":\\\"123 + Main St\\\",\\\"city\\\":\\\"Anytown\\\",\\\"zip_code\\\":\\\"12345\\\"}}\"\n + \ }\n }\n ],\n \"refusal\": null\n },\n + \ \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n ],\n + \ \"usage\": {\n \"prompt_tokens\": 118,\n \"completion_tokens\": 30,\n + \ \"total_tokens\": 148,\n \"prompt_tokens_details\": {\n \"cached_tokens\": + 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n + \ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\": + 0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\": + \"default\",\n \"system_fingerprint\": \"fp_bd83329f63\"\n}\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9028b863dbaa672f-ATL + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Wed, 15 Jan 2025 20:47:20 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '840' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149999968' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_2f9d1e3f0ace4944891dde05093486aa + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/tests/utilities/test_converter.py b/tests/utilities/test_converter.py index c63d6dba37..df906acd72 100644 --- a/tests/utilities/test_converter.py +++ b/tests/utilities/test_converter.py @@ -39,6 +39,22 @@ class NestedModel(BaseModel): data: SimpleModel +class Address(BaseModel): + street: str + city: str + zip_code: str + + +class Person(BaseModel): + name: str + age: int + address: Address + + +class CustomConverter(Converter): + pass + + # Fixtures @pytest.fixture def mock_agent(): 
@@ -199,26 +215,23 @@ def test_convert_with_instructions_failure( # Tests for get_conversion_instructions def test_get_conversion_instructions_gpt(): - mock_llm = Mock() - mock_llm.openai_api_base = None + llm = LLM(model="gpt-4o-mini") with patch.object(LLM, "supports_function_calling") as supports_function_calling: supports_function_calling.return_value = True - instructions = get_conversion_instructions(SimpleModel, mock_llm) + instructions = get_conversion_instructions(SimpleModel, llm) model_schema = PydanticSchemaParser(model=SimpleModel).get_schema() assert ( instructions - == f"I'm gonna convert this raw text into valid JSON.\n\nThe json should have the following structure, with the following keys:\n{model_schema}" + == f"Please convert the following text into valid JSON.\n\nThe JSON should follow this schema:\n```json\n{model_schema}\n```" ) def test_get_conversion_instructions_non_gpt(): - mock_llm = Mock() - with patch.object(LLM, "supports_function_calling") as supports_function_calling: - supports_function_calling.return_value = False - with patch("crewai.utilities.converter.PydanticSchemaParser") as mock_parser: - mock_parser.return_value.get_schema.return_value = "Sample schema" - instructions = get_conversion_instructions(SimpleModel, mock_llm) - assert "Sample schema" in instructions + llm = LLM(model="ollama/llama3.1", base_url="http://localhost:11434") + with patch.object(LLM, "supports_function_calling", return_value=False): + instructions = get_conversion_instructions(SimpleModel, llm) + assert '"name": str' in instructions + assert '"age": int' in instructions # Tests for is_gpt @@ -232,10 +245,6 @@ def test_supports_function_calling_false(): assert llm.supports_function_calling() is False -class CustomConverter(Converter): - pass - - def test_create_converter_with_mock_agent(): mock_agent = MagicMock() mock_agent.get_output_converter.return_value = MagicMock(spec=Converter) @@ -255,7 +264,7 @@ def test_create_converter_with_mock_agent(): def test_create_converter_with_custom_converter(): converter = create_converter( converter_cls=CustomConverter, - llm=Mock(), + llm=LLM(model="gpt-4o-mini"), text="Sample", model=SimpleModel, instructions="Convert", @@ -313,3 +322,269 @@ class ModelWithDictField(BaseModel): description = generate_model_description(ModelWithDictField) expected_description = '{\n "attributes": Dict[str, int]\n}' assert description == expected_description + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_convert_with_instructions(): + llm = LLM(model="gpt-4o-mini") + sample_text = "Name: Alice, Age: 30" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + ) + + # Act + output = converter.to_pydantic() + + # Assert + assert isinstance(output, SimpleModel) + assert output.name == "Alice" + assert output.age == 30 + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_converter_with_llama3_2_model(): + llm = LLM(model="ollama/llama3.2:3b", base_url="http://localhost:11434") + + sample_text = "Name: Alice Llama, Age: 30" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + ) + + output = converter.to_pydantic() + + assert isinstance(output, SimpleModel) + assert output.name == "Alice Llama" + assert output.age == 30 + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def 
test_converter_with_llama3_1_model(): + llm = LLM(model="ollama/llama3.1", base_url="http://localhost:11434") + sample_text = "Name: Alice Llama, Age: 30" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + ) + + output = converter.to_pydantic() + + assert isinstance(output, SimpleModel) + assert output.name == "Alice Llama" + assert output.age == 30 + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_converter_with_nested_model(): + llm = LLM(model="gpt-4o-mini") + sample_text = "Name: John Doe\nAge: 30\nAddress: 123 Main St, Anytown, 12345" + + instructions = get_conversion_instructions(Person, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=Person, + instructions=instructions, + ) + + output = converter.to_pydantic() + + assert isinstance(output, Person) + assert output.name == "John Doe" + assert output.age == 30 + assert isinstance(output.address, Address) + assert output.address.street == "123 Main St" + assert output.address.city == "Anytown" + assert output.address.zip_code == "12345" + + +# Tests for error handling +def test_converter_error_handling(): + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + llm.call.return_value = "Invalid JSON" + sample_text = "Name: Alice, Age: 30" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + ) + + with pytest.raises(ConverterError) as exc_info: + output = converter.to_pydantic() + + assert "Failed to convert text into a Pydantic model" in str(exc_info.value) + + +# Tests for retry logic +def test_converter_retry_logic(): + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + llm.call.side_effect = [ + "Invalid JSON", + "Still invalid", + '{"name": "Retry Alice", "age": 30}', + ] + sample_text = "Name: Retry Alice, Age: 30" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + max_attempts=3, + ) + + output = converter.to_pydantic() + + assert isinstance(output, SimpleModel) + assert output.name == "Retry Alice" + assert output.age == 30 + assert llm.call.call_count == 3 + + +# Tests for optional fields +def test_converter_with_optional_fields(): + class OptionalModel(BaseModel): + name: str + age: Optional[int] + + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + # Simulate the LLM's response with 'age' explicitly set to null + llm.call.return_value = '{"name": "Bob", "age": null}' + sample_text = "Name: Bob, age: None" + + instructions = get_conversion_instructions(OptionalModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=OptionalModel, + instructions=instructions, + ) + + output = converter.to_pydantic() + + assert isinstance(output, OptionalModel) + assert output.name == "Bob" + assert output.age is None + + +# Tests for list fields +def test_converter_with_list_field(): + class ListModel(BaseModel): + items: List[int] + + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + llm.call.return_value = '{"items": [1, 2, 3]}' + sample_text = "Items: 1, 2, 3" + + instructions = get_conversion_instructions(ListModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=ListModel, + instructions=instructions, + ) + + 
output = converter.to_pydantic() + + assert isinstance(output, ListModel) + assert output.items == [1, 2, 3] + + +# Tests for enums +from enum import Enum + + +def test_converter_with_enum(): + class Color(Enum): + RED = "red" + GREEN = "green" + BLUE = "blue" + + class EnumModel(BaseModel): + name: str + color: Color + + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + llm.call.return_value = '{"name": "Alice", "color": "red"}' + sample_text = "Name: Alice, Color: Red" + + instructions = get_conversion_instructions(EnumModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=EnumModel, + instructions=instructions, + ) + + output = converter.to_pydantic() + + assert isinstance(output, EnumModel) + assert output.name == "Alice" + assert output.color == Color.RED + + +# Tests for ambiguous input +def test_converter_with_ambiguous_input(): + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = False + llm.call.return_value = '{"name": "Charlie", "age": "Not an age"}' + sample_text = "Charlie is thirty years old" + + instructions = get_conversion_instructions(SimpleModel, llm) + converter = Converter( + llm=llm, + text=sample_text, + model=SimpleModel, + instructions=instructions, + ) + + with pytest.raises(ConverterError) as exc_info: + output = converter.to_pydantic() + + assert "validation error" in str(exc_info.value).lower() + + +# Tests for function calling support +def test_converter_with_function_calling(): + llm = Mock(spec=LLM) + llm.supports_function_calling.return_value = True + + instructor = Mock() + instructor.to_pydantic.return_value = SimpleModel(name="Eve", age=35) + + converter = Converter( + llm=llm, + text="Name: Eve, Age: 35", + model=SimpleModel, + instructions="Convert this text.", + ) + converter._create_instructor = Mock(return_value=instructor) + + output = converter.to_pydantic() + + assert isinstance(output, SimpleModel) + assert output.name == "Eve" + assert output.age == 35 + instructor.to_pydantic.assert_called_once() diff --git a/tests/utilities/test_pydantic_schema_parser.py b/tests/utilities/test_pydantic_schema_parser.py new file mode 100644 index 0000000000..ee6d7e287f --- /dev/null +++ b/tests/utilities/test_pydantic_schema_parser.py @@ -0,0 +1,94 @@ +from typing import Any, Dict, List, Optional, Set, Tuple, Union + +import pytest +from pydantic import BaseModel, Field + +from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser + + +def test_simple_model(): + class SimpleModel(BaseModel): + field1: int + field2: str + + parser = PydanticSchemaParser(model=SimpleModel) + schema = parser.get_schema() + + expected_schema = """{ + field1: int, + field2: str +}""" + assert schema.strip() == expected_schema.strip() + + +def test_nested_model(): + class NestedModel(BaseModel): + nested_field: int + + class ParentModel(BaseModel): + parent_field: str + nested: NestedModel + + parser = PydanticSchemaParser(model=ParentModel) + schema = parser.get_schema() + + expected_schema = """{ + parent_field: str, + nested: NestedModel + { + nested_field: int + } +}""" + assert schema.strip() == expected_schema.strip() + + +def test_model_with_list(): + class ListModel(BaseModel): + list_field: List[int] + + parser = PydanticSchemaParser(model=ListModel) + schema = parser.get_schema() + + expected_schema = """{ + list_field: List[int] +}""" + assert schema.strip() == expected_schema.strip() + + +def test_model_with_optional_field(): + class OptionalModel(BaseModel): + optional_field: Optional[str] + + 
parser = PydanticSchemaParser(model=OptionalModel) + schema = parser.get_schema() + + expected_schema = """{ + optional_field: Optional[str] +}""" + assert schema.strip() == expected_schema.strip() + + +def test_model_with_union(): + class UnionModel(BaseModel): + union_field: Union[int, str] + + parser = PydanticSchemaParser(model=UnionModel) + schema = parser.get_schema() + + expected_schema = """{ + union_field: Union[int, str] +}""" + assert schema.strip() == expected_schema.strip() + + +def test_model_with_dict(): + class DictModel(BaseModel): + dict_field: Dict[str, int] + + parser = PydanticSchemaParser(model=DictModel) + schema = parser.get_schema() + + expected_schema = """{ + dict_field: Dict[str, int] +}""" + assert schema.strip() == expected_schema.strip() From cc129a0bce435642d14df34cf9474ea0cc4838e2 Mon Sep 17 00:00:00 2001 From: "Brandon Hancock (bhancock_ai)" <109994880+bhancockio@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:47:59 -0500 Subject: [PATCH 4/6] Fix docling issues (#1909) * Fix docling issues * update docs --- docs/concepts/knowledge.mdx | 6 ++++++ src/crewai/knowledge/source/crew_docling_source.py | 11 ++++++----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/concepts/knowledge.mdx b/docs/concepts/knowledge.mdx index 91110e19f7..254a56bb81 100644 --- a/docs/concepts/knowledge.mdx +++ b/docs/concepts/knowledge.mdx @@ -93,6 +93,12 @@ result = crew.kickoff(inputs={"question": "What city does John live in and how o Here's another example with the `CrewDoclingSource`. The CrewDoclingSource is actually quite versatile and can handle multiple file formats including TXT, PDF, DOCX, HTML, and more. + + You need to install `docling` for the following example to work: `uv add docling` + + + + ```python Code from crewai import LLM, Agent, Crew, Process, Task from crewai.knowledge.source.crew_docling_source import CrewDoclingSource diff --git a/src/crewai/knowledge/source/crew_docling_source.py b/src/crewai/knowledge/source/crew_docling_source.py index bbfcf9b922..6ca0ae967b 100644 --- a/src/crewai/knowledge/source/crew_docling_source.py +++ b/src/crewai/knowledge/source/crew_docling_source.py @@ -8,6 +8,7 @@ from docling.exceptions import ConversionError from docling_core.transforms.chunker.hierarchical_chunker import HierarchicalChunker from docling_core.types.doc.document import DoclingDocument + DOCLING_AVAILABLE = True except ImportError: DOCLING_AVAILABLE = False @@ -38,8 +39,8 @@ def __init__(self, *args, **kwargs): file_paths: List[Union[Path, str]] = Field(default_factory=list) chunks: List[str] = Field(default_factory=list) safe_file_paths: List[Union[Path, str]] = Field(default_factory=list) - content: List[DoclingDocument] = Field(default_factory=list) - document_converter: DocumentConverter = Field( + content: List["DoclingDocument"] = Field(default_factory=list) + document_converter: "DocumentConverter" = Field( default_factory=lambda: DocumentConverter( allowed_formats=[ InputFormat.MD, @@ -65,7 +66,7 @@ def model_post_init(self, _) -> None: self.safe_file_paths = self.validate_content() self.content = self._load_content() - def _load_content(self) -> List[DoclingDocument]: + def _load_content(self) -> List["DoclingDocument"]: try: return self._convert_source_to_docling_documents() except ConversionError as e: @@ -87,11 +88,11 @@ def add(self) -> None: self.chunks.extend(list(new_chunks_iterable)) self._save_documents() - def _convert_source_to_docling_documents(self) -> List[DoclingDocument]: + def 
_convert_source_to_docling_documents(self) -> List["DoclingDocument"]: conv_results_iter = self.document_converter.convert_all(self.safe_file_paths) return [result.document for result in conv_results_iter] - def _chunk_doc(self, doc: DoclingDocument) -> Iterator[str]: + def _chunk_doc(self, doc: "DoclingDocument") -> Iterator[str]: chunker = HierarchicalChunker() for chunk in chunker.chunk(doc): yield chunk.text From 3fecde49b6894461038ca46d9c079d3e235bfa0f Mon Sep 17 00:00:00 2001 From: fzowl <160063452+fzowl@users.noreply.github.com> Date: Thu, 16 Jan 2025 19:49:46 +0100 Subject: [PATCH 5/6] feature: Introducing VoyageAI (#1871) * Introducing VoyageAI's embedding models * Adding back the whitespaces * Adding the whitespaces back --- docs/concepts/knowledge.mdx | 1 + docs/concepts/memory.mdx | 20 +++++++++++++++++++ docs/how-to/llm-connections.mdx | 1 + .../utilities/embedding_configurator.py | 12 +++++++++++ 4 files changed, 34 insertions(+) diff --git a/docs/concepts/knowledge.mdx b/docs/concepts/knowledge.mdx index 254a56bb81..e4e40ba3ea 100644 --- a/docs/concepts/knowledge.mdx +++ b/docs/concepts/knowledge.mdx @@ -288,6 +288,7 @@ The `embedder` parameter supports various embedding model providers that include - `ollama`: Local embeddings with Ollama - `vertexai`: Google Cloud VertexAI embeddings - `cohere`: Cohere's embedding models +- `voyageai`: VoyageAI's embedding models - `bedrock`: AWS Bedrock embeddings - `huggingface`: Hugging Face models - `watson`: IBM Watson embeddings diff --git a/docs/concepts/memory.mdx b/docs/concepts/memory.mdx index b04b29c646..751b6dd2eb 100644 --- a/docs/concepts/memory.mdx +++ b/docs/concepts/memory.mdx @@ -293,6 +293,26 @@ my_crew = Crew( } ) ``` +### Using VoyageAI embeddings + +```python Code +from crewai import Crew, Agent, Task, Process + +my_crew = Crew( + agents=[...], + tasks=[...], + process=Process.sequential, + memory=True, + verbose=True, + embedder={ + "provider": "voyageai", + "config": { + "api_key": "YOUR_API_KEY", + "model_name": "" + } + } +) +``` ### Using HuggingFace embeddings ```python Code diff --git a/docs/how-to/llm-connections.mdx b/docs/how-to/llm-connections.mdx index 25509c2992..33be323b75 100644 --- a/docs/how-to/llm-connections.mdx +++ b/docs/how-to/llm-connections.mdx @@ -23,6 +23,7 @@ LiteLLM supports a wide range of providers, including but not limited to: - Azure OpenAI - AWS (Bedrock, SageMaker) - Cohere +- VoyageAI - Hugging Face - Ollama - Mistral AI diff --git a/src/crewai/utilities/embedding_configurator.py b/src/crewai/utilities/embedding_configurator.py index 44e832ec2d..71965bf536 100644 --- a/src/crewai/utilities/embedding_configurator.py +++ b/src/crewai/utilities/embedding_configurator.py @@ -14,6 +14,7 @@ def __init__(self): "vertexai": self._configure_vertexai, "google": self._configure_google, "cohere": self._configure_cohere, + "voyageai": self._configure_voyageai, "bedrock": self._configure_bedrock, "huggingface": self._configure_huggingface, "watson": self._configure_watson, @@ -124,6 +125,17 @@ def _configure_cohere(config, model_name): api_key=config.get("api_key"), ) + @staticmethod + def _configure_voyageai(config, model_name): + from chromadb.utils.embedding_functions.voyageai_embedding_function import ( + VoyageAIEmbeddingFunction, + ) + + return VoyageAIEmbeddingFunction( + model_name=model_name, + api_key=config.get("api_key"), + ) + @staticmethod def _configure_bedrock(config, model_name): from chromadb.utils.embedding_functions.amazon_bedrock_embedding_function import ( From 
30d027158a04e633b42b21f246478a9af61a9ce1 Mon Sep 17 00:00:00 2001 From: "Brandon Hancock (bhancock_ai)" <109994880+bhancockio@users.noreply.github.com> Date: Thu, 16 Jan 2025 15:54:16 -0500 Subject: [PATCH 6/6] Fix union issue that Daniel was running into (#1910) --- src/crewai/utilities/converter.py | 12 +++++++++--- tests/utilities/test_converter.py | 9 +++++++++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/src/crewai/utilities/converter.py b/src/crewai/utilities/converter.py index e9f8c6b8e6..5a797d8a98 100644 --- a/src/crewai/utilities/converter.py +++ b/src/crewai/utilities/converter.py @@ -241,9 +241,13 @@ def describe_field(field_type): origin = get_origin(field_type) args = get_args(field_type) - if origin is Union and type(None) in args: + if origin is Union or (origin is None and len(args) > 0): + # Handle both Union and the new '|' syntax non_none_args = [arg for arg in args if arg is not type(None)] - return f"Optional[{describe_field(non_none_args[0])}]" + if len(non_none_args) == 1: + return f"Optional[{describe_field(non_none_args[0])}]" + else: + return f"Optional[Union[{', '.join(describe_field(arg) for arg in non_none_args)}]]" elif origin is list: return f"List[{describe_field(args[0])}]" elif origin is dict: @@ -252,8 +256,10 @@ def describe_field(field_type): return f"Dict[{key_type}, {value_type}]" elif isinstance(field_type, type) and issubclass(field_type, BaseModel): return generate_model_description(field_type) - else: + elif hasattr(field_type, "__name__"): return field_type.__name__ + else: + return str(field_type) fields = model.__annotations__ field_descriptions = [ diff --git a/tests/utilities/test_converter.py b/tests/utilities/test_converter.py index df906acd72..f661af9cde 100644 --- a/tests/utilities/test_converter.py +++ b/tests/utilities/test_converter.py @@ -588,3 +588,12 @@ def test_converter_with_function_calling(): assert output.name == "Eve" assert output.age == 35 instructor.to_pydantic.assert_called_once() + + +def test_generate_model_description_union_field(): + class UnionModel(BaseModel): + field: int | str | None + + description = generate_model_description(UnionModel) + expected_description = '{\n "field": int | str | None\n}' + assert description == expected_description
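
A minimal sketch of the behaviour PATCH 6/6 targets, assuming `generate_model_description` is importable from `crewai.utilities.converter` as in the patched tests. The rendering of the PEP 604 field is taken from the new test above, and the `Optional[...]` renderings follow the updated `describe_field` branches; before this change, a field annotated with the `|` syntax appears to have hit the unconditional `field_type.__name__` access, which `types.UnionType` instances do not provide.

```python
# Illustrative only -- not part of the patch. Requires Python 3.10+ for the
# PEP 604 `|` union syntax in class-level annotations.
from typing import Optional, Union

from pydantic import BaseModel

from crewai.utilities.converter import generate_model_description  # assumed import path


class Report(BaseModel):
    # typing.Union / Optional: handled by the `origin is Union` branch and
    # rendered as Optional[...] or Optional[Union[...]].
    summary: Optional[str]
    score: Union[int, float, None]
    # PEP 604 union: get_origin() returns types.UnionType here, so describe_field
    # now falls through to the str(field_type) fallback, e.g. "int | str | None".
    tags: int | str | None


# Prints a brace-wrapped description of each annotated field, in the same
# format asserted by test_generate_model_description_union_field above.
print(generate_model_description(Report))
```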