minpeter committed on
Commit
ae4696b
·
verified ·
1 Parent(s): 80fcbf7

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. README.md +41 -0
README.md ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ ```python
3
+ from transformers import AutoModelForCausalLM, AutoTokenizer
4
+
5
+ model_name = "minpeter/Llama-3.2-1B-chatml-tool-v3"
6
+
7
+ model = AutoModelForCausalLM.from_pretrained(model_name)
8
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
9
+
10
+ input_text = """
11
+ <|im_start|>system
12
+ You are a function calling AI model. You are provided with function signatures within <tools> </tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions.
13
+ <tools>
14
+ [{"function": {"description": "Get the current weather in a given location", "name": "get_current_weather", "parameters": {"properties": {"location": {"description": "The city and state, e.g. San Francisco, CA", "type": "string"}, "unit": {"enum": ["celsius", "fahrenheit"], "type": "string"}}, "required": ["location"], "type": "object"}}, "type": "function"}]
15
+ </tools>
16
+ For each function call return a json object with function name and arguments within <tool_call> </tool_call> tags with the following schema:
17
+ <tool_call>
18
+ {'arguments': <args-dict>, 'name': <function-name>}
19
+ </tool_call><|im_end|>
20
+ <|im_start|>user
21
+ What is the weather like in Boston?<|im_end|>
22
+ <|im_start|>assistant
23
+ """
24
+
25
+ input_length = len(tokenizer.tokenize(input_text))
26
+
27
+ input_ids = tokenizer.encode(input_text, return_tensors="pt")
28
+
29
+ output = model.generate(input_ids, max_new_tokens=600)
30
+ generated_ids = output[0][input_length:]
31
+
32
+ generated_text = tokenizer.decode(generated_ids, skip_special_tokens=True)
33
+
34
+ tokens = tokenizer.tokenize(generated_text)
35
+ token_ids = tokenizer.convert_tokens_to_ids(tokens)
36
+
37
+ for token, id in zip(tokens, token_ids):
38
+ print(f"Token: {token:20} ID: {id}")
39
+
40
+ print(f"\nGenerated text:\n{generated_text}")
41
+ ```