diff --git a/litellm/llms/prompt_templates/factory.py b/litellm/llms/prompt_templates/factory.py
index 2abb544095d7..d7f2272c4f58 100644
--- a/litellm/llms/prompt_templates/factory.py
+++ b/litellm/llms/prompt_templates/factory.py
@@ -1,7 +1,8 @@
 from enum import Enum
 import requests, traceback
 import json, re, xml.etree.ElementTree as ET
-from jinja2 import Template, exceptions, Environment, meta
+from jinja2 import Template, exceptions, meta, BaseLoader
+from jinja2.sandbox import ImmutableSandboxedEnvironment
 from typing import Optional, Any
 import imghdr, base64
 from typing import List
@@ -219,6 +220,15 @@ def phind_codellama_pt(messages):
 
 
 def hf_chat_template(model: str, messages: list, chat_template: Optional[Any] = None):
+    # Define Jinja2 environment
+    env = ImmutableSandboxedEnvironment()
+
+    def raise_exception(message):
+        raise Exception(f"Error message - {message}")
+
+    # Create a template object from the template text
+    env.globals["raise_exception"] = raise_exception
+
     ## get the tokenizer config from huggingface
     bos_token = ""
     eos_token = ""
@@ -249,12 +259,6 @@ def _get_tokenizer_config(hf_model_name):
         eos_token = tokenizer_config["eos_token"]
         chat_template = tokenizer_config["chat_template"]
 
-    def raise_exception(message):
-        raise Exception(f"Error message - {message}")
-
-    # Create a template object from the template text
-    env = Environment()
-    env.globals["raise_exception"] = raise_exception
     try:
         template = env.from_string(chat_template)
     except Exception as e:
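For context, a minimal sketch (not part of the patch) of why swapping `Environment` for `ImmutableSandboxedEnvironment` matters here: the chat template is fetched from a model's `tokenizer_config.json` on Hugging Face, i.e. untrusted input, and a plain Jinja2 environment will evaluate expressions that reach into Python internals. The malicious template string below is a hypothetical example for illustration only.

```python
# Sketch: sandboxed vs. plain Jinja2 rendering of an untrusted chat template.
# The template string is a hypothetical attacker payload, not from the patch.
from jinja2 import Environment
from jinja2.exceptions import SecurityError
from jinja2.sandbox import ImmutableSandboxedEnvironment

malicious_template = "{{ ''.__class__.__mro__[1].__subclasses__() }}"

# A plain Environment walks the attribute chain and exposes Python internals.
print(Environment().from_string(malicious_template).render()[:80])

# The sandboxed environment blocks access to internal attributes instead.
try:
    ImmutableSandboxedEnvironment().from_string(malicious_template).render()
except SecurityError as e:
    print(f"Blocked by sandbox: {e}")
```

Legitimate chat templates only format `messages`, `bos_token`, and `eos_token`, so they render the same under the sandbox; only templates that try to escape into the interpreter are rejected.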