Update chat_template.jinja

#85
by qgallouedec HF Staff - opened

Small inconsistency when:

  • no developer instructions
  • tool used

To reproduce:

from transformers.utils import get_json_schema

def get_current_temperature(location: str, unit: str = "celsius") -> dict:
    """
    Test function

    Args:
        location: The location for which to get the current temperature.
        unit: The unit of temperature. Defaults to "celsius".

    Returns:
        dict: A dictionary containing the temperature, location, and unit.
    """
    # NOTE: the docstring above is deliberately left untouched — it is parsed
    # by `get_json_schema` below, so its text is part of the schema output.
    # Stub: the arguments are intentionally ignored; a fixed reading is
    # returned so the rendered prompt is fully deterministic.
    fixed_reading = {
        "temperature": 19.9,
        "location": "San Francisco, California, United States",
        "unit": "celsius",
    }
    return fixed_reading

# JSON tool schema derived from the signature and docstring of the function above.
schema = get_json_schema(get_current_temperature)

# ---

# Harmony side of the comparison: render the reference prompt with openai_harmony.
from openai_harmony import Conversation, HarmonyEncodingName, Message, Role, SystemContent, load_harmony_encoding, ReasoningEffort, DeveloperContent, ToolDescription

# Encoder/renderer for the gpt-oss harmony prompt format.
encoding = load_harmony_encoding(HarmonyEncodingName.HARMONY_GPT_OSS)

def get_developer_message(tools: list | None):
    """Build a harmony DeveloperContent, attaching function-tool descriptions.

    Args:
        tools: OpenAI-style tool schemas (each with a "function" entry), or
            None for a developer message without tools.

    Returns:
        A DeveloperContent, with function tools attached when `tools` is given.
    """
    message = DeveloperContent.new()
    if tools is None:
        return message
    descriptions = []
    for tool in tools:
        fn = tool["function"]
        descriptions.append(
            ToolDescription.new(
                fn["name"],
                fn["description"],
                parameters=fn["parameters"],
            )
        )
    return message.with_function_tools(descriptions)

# Assemble the system message step by step (each with_* call returns a new
# SystemContent, so the name is rebound at every step).
system_message = SystemContent.new()
system_message = system_message.with_model_identity("You are ChatGPT, a large language model trained by OpenAI.")
system_message = system_message.with_reasoning_effort(ReasoningEffort.MEDIUM)
system_message = system_message.with_conversation_start_date("2025-08-08")
system_message = system_message.with_knowledge_cutoff("2024-06")
system_message = system_message.with_required_channels(["analysis", "commentary", "final"])

# Single-turn conversation: system + developer (carrying the tool schema) + user.
conversation_messages = [
    Message.from_role_and_content(Role.SYSTEM, system_message),
    Message.from_role_and_content(Role.DEVELOPER, get_developer_message(tools=[schema])),
    Message.from_role_and_content(Role.USER, "What is 1+1?"),
]
convo = Conversation.from_messages(conversation_messages)

# Render the prompt tokens for an assistant completion, then decode back to
# text so it can be diffed against the chat-template output.
tokens = encoding.render_conversation_for_completion(convo, Role.ASSISTANT)
output = encoding.decode(tokens)

with open("reference.txt", "w") as f:
    f.write(output)

# ----

from transformers import AutoTokenizer
from transformers.utils import get_json_schema


# Load the gpt-oss tokenizer; its bundled chat template is what we compare
# against the harmony reference rendering.
tokenizer = AutoTokenizer.from_pretrained("openai/gpt-oss-20b")

# Same single-turn user message as in the harmony reference script.
messages = [
    {
        "role": "user",
        "content": "What is 1+1?",
    },
]


# Render untokenized, passing the same tool schema and requesting a
# generation prompt, so the string can be diffed against reference.txt.
output = tokenizer.apply_chat_template(
    messages, tokenize=False, tools=[schema], add_generation_prompt=True
)

# Fix: the output filename was misspelled as "apply_chat_tempalte.txt".
with open("apply_chat_template.txt", "w") as f:
    f.write(output)

Reference (harmony)

...
# Valid channels: analysis, commentary, final. Channel must be included for every message.
Calls to these tools must go to the commentary channel: 'functions'.<|end|><|start|>developer<|message|># Tools

## functions
...

Rendered with apply_chat_template:

...
# Valid channels: analysis, commentary, final. Channel must be included for every message.
Calls to these tools must go to the commentary channel: 'functions'.<|end|><|start|>developer<|message|>

# Tools

## functions
...
dkundel-openai changed pull request status to merged

Sign up or log in to comment