1. Use Cases

The Function Calling feature allows the model to call external tools to enhance its capabilities. This functionality enables the model to act as a brain that calls external tools (such as searching for external knowledge, checking itineraries, or specific domain tools), effectively addressing issues like hallucinations and knowledge timeliness.

2. How to Use

2.1 Add tools parameters via REST API

Include the following in the request body:

"tools": [
    {
        'type': 'function',
        'function': {
            'name': 'name of the actual function to execute',
            'description': 'Description of the function',
            'parameters': {
                '_comments': 'Description of the function parameters'
            },
        }
    },
    {
        '_comments': 'Additional function-related notes'
    }
]

For example, a complete payload:

payload = {
    "model": "deepseek-ai/DeepSeek-V2.5",
    "messages": [
        {
            "role": "user",
            "content": "What opportunities and challenges will the global large-scale AI model industry encounter in 2025?"
        }
    ],
    "tools": [
    {
        'type': 'function',
        'function': {
            'name': 'name of the actual function to execute',
            'description': 'Description of the function',
            'parameters': {
                '_comments': 'Description of the function parameters'
            },
        }
    },
    {
        '_comments': 'Additional function-related notes'
    }
    ],
    '_comments': 'List of other functions'
}

2.2 Use with OpenAI Library

This feature is compatible with OpenAI. When using the OpenAI library, add the corresponding tools parameter as tools=[corresponding tools]. For example:

response = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-V2.5",
    messages=messages,
    tools=[
        {
            'type': 'function',
            'function': {
                'name': 'name of the actual function to execute',
                'description': 'Description of the function',
                'parameters': {
                    # Description of the function parameters
                },
            }
        },
        {
            # Additional function-related notes
        }
    ]
    # Other chat.completions parameters
)

3. Supported Models

Currently supported models include:

  • Qwen Series:
    • Qwen/Qwen2.5-72B-Instruct
    • Qwen/Qwen2.5-32B-Instruct
    • Qwen/Qwen2.5-14B-Instruct
    • Qwen/Qwen2.5-7B-Instruct
Note: The list of supported models is continuously updated. Please refer to this document for the latest list of supported models.

4. Examples

4.1 Example 1: Extending numerical computation capabilities of large language models via function calling

This code introduces four functions — addition, multiplication, comparison, and counting repeated letters in a string — demonstrating how function calling can address tasks that large language models struggle with because of token-level prediction, such as counting characters or comparing decimal numbers.


from openai import OpenAI

# SiliconFlow client — the endpoint is OpenAI-compatible.
client = OpenAI(
    base_url="https://api.ap.siliconflow.com/v1",
    api_key="Your APIKEY",  # Obtain from https://cloud.siliconflow.com/account/ak
)

def add(a: float, b: float):
    """Return the sum of *a* and *b*."""
    total = a + b
    return total

def mul(a: float, b: float):
    """Return the product of *a* and *b*."""
    product = b * a
    return product

def compare(a: float, b: float):
    """Describe which of *a* and *b* is larger, or note that they are equal."""
    if a == b:
        return f'{a} is equal to {b}'
    if a > b:
        return f'{a} is greater than {b}'
    return f'{b} is greater than {a}'

def count_letter_in_string(a: str, b: str):
    """Count case-insensitive occurrences of letter *b* within string *a*."""
    haystack = a.lower()
    needle = b.lower()
    occurrences = haystack.count(needle)
    return f"The letter '{needle}' appears {occurrences} times in the string."


# Tool schemas advertised to the model. The `parameters` object follows
# JSON Schema, whose valid type names are "number", "integer", "string",
# "boolean", "object", "array", "null" — NOT Python type names such as
# "int" / "str" / "float" (the original used those, which is invalid).
tools = [
{
    'type': 'function',
    'function': {
        'name': 'add',
        'description': 'Compute the sum of two numbers',
        'parameters': {
            'type': 'object',
            'properties': {
                'a': {
                    'type': 'number',
                    'description': 'A number',
                },
                'b': {
                    'type': 'number',
                    'description': 'A number',
                },
            },
            'required': ['a', 'b'],
        },
    }
},
{
    'type': 'function',
    'function': {
        'name': 'mul',
        'description': 'Calculate the product of two numbers',
        'parameters': {
            'type': 'object',
            'properties': {
                'a': {
                    'type': 'number',
                    'description': 'A number',
                },
                'b': {
                    'type': 'number',
                    'description': 'A number',
                },
            },
            'required': ['a', 'b'],
        },
    }
},
{
    'type': 'function',
    'function': {
        'name': 'count_letter_in_string',
        'description': 'Count letter number in a string',
        'parameters': {
            'type': 'object',
            'properties': {
                'a': {
                    'type': 'string',
                    'description': 'source string',
                },
                'b': {
                    'type': 'string',
                    'description': 'letter',
                },
            },
            'required': ['a', 'b'],
        },
    }
},
{
    'type': 'function',
    'function': {
        'name': 'compare',
        'description': 'Compare two numbers and determine which is larger',
        'parameters': {
            'type': 'object',
            'properties': {
                'a': {
                    'type': 'number',
                    'description': 'A number',
                },
                'b': {
                    'type': 'number',
                    'description': 'A number',
                },
            },
            'required': ['a', 'b'],
        },
    }
}
]

def function_call_playground(prompt):
    """Run one tool-call round trip and return the model's final answer.

    Asks the model, executes the single tool call it requests, appends the
    tool result to the conversation, and asks the model again for the final
    natural-language response.

    Note: tool-call arguments come back from the model as a JSON string and
    are untrusted input — parse them with json.loads and dispatch through an
    explicit registry instead of eval() (eval is unsafe and also fails on
    JSON literals such as true/false/null).
    """
    import json

    messages = [{'role': 'user', 'content': prompt}]
    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-V2.5",
        messages=messages,
        temperature=0.01,
        top_p=0.95,
        stream=False,
        tools=tools)

    # Assumes the model issued at least one tool call — TODO confirm for
    # prompts where no tool applies.
    tool_call = response.choices[0].message.tool_calls[0]
    func1_name = tool_call.function.name
    func1_args = json.loads(tool_call.function.arguments)
    # Dispatch only to the functions we registered in `tools`.
    registry = {f.__name__: f for f in (add, mul, compare, count_letter_in_string)}
    func1_out = registry[func1_name](**func1_args)

    messages.append(response.choices[0].message)
    messages.append({
        'role': 'tool',
        'content': f'{func1_out}',
        'tool_call_id': tool_call.id
    })

    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-V2.5",
        messages=messages,
        temperature=0.01,
        top_p=0.95,
        stream=False,
        tools=tools)
    return response.choices[0].message.content
  
# Questions that stress token-level reasoning, where external tools help.
prompts = [
    "In Chinese: How many 'r's are in the word 'strawberry'?",
    "In Chinese: Which is smaller, 9.11 or 9.9?",
]

for prompt in prompts:
    answer = function_call_playground(prompt)
    print(answer)

The model will output:

There are 3 'r's in the word 'strawberry'.
9.11 is smaller than 9.9.

4.2 Example 2: Extending the model’s understanding of external environments through function calling

This code demonstrates querying external information using one function via an external API.

import requests
from openai import OpenAI

# SiliconFlow client — the endpoint is OpenAI-compatible.
client = OpenAI(
    base_url="https://api.ap.siliconflow.com/v1",
    api_key="Your APIKEY",  # Obtain from https://cloud.siliconflow.com/account/ak
)

# Weather query function using WeatherAPI
def get_weather(city: str):
    api_key = "Your WeatherAPI APIKEY"  # Replace with your own WeatherAPI APIKEY
    base_url = "http://api.weatherapi.com/v1/current.json"
    params = {
        'key': api_key,
        'q': city,
        'aqi': 'no'  # No air quality data needed
    }
    
    response = requests.get(base_url, params=params)
    
    if response.status_code == 200:
        data = response.json()
        weather = data['current']['condition']['text']
        temperature = data['current']['temp_c']
        return f"The weather in {city} is {weather} with a temperature of {temperature}°C."
    else:
        return f"Could not retrieve weather information for {city}."

# Schema advertised to the model: a single function, get_weather(city).
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a given city.",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "The name of the city to query weather for.",
                    },
                },
                "required": ["city"],
            },
        },
    },
]

def function_call_playground(prompt):
    """Run one tool-call round trip and return the model's final answer.

    Asks the model, executes the tool call it requests (get_weather), feeds
    the tool output back into the conversation, and asks the model again for
    the final natural-language response.

    Note: tool-call arguments come back from the model as a JSON string and
    are untrusted input — parse them with json.loads and dispatch through an
    explicit registry instead of eval() (eval is unsafe and also fails on
    JSON literals such as true/false/null).
    """
    import json

    messages = [{'role': 'user', 'content': prompt}]

    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-V2.5",
        messages=messages,
        temperature=0.01,
        top_p=0.95,
        stream=False,
        tools=tools
    )

    # Assumes the model issued at least one tool call — TODO confirm for
    # prompts where no tool applies.
    tool_call = response.choices[0].message.tool_calls[0]
    func1_name = tool_call.function.name
    func1_args = json.loads(tool_call.function.arguments)
    # Dispatch only to the functions we registered in `tools`.
    registry = {'get_weather': get_weather}
    func1_out = registry[func1_name](**func1_args)

    messages.append(response.choices[0].message)
    messages.append({
        'role': 'tool',
        'content': f'{func1_out}',
        'tool_call_id': tool_call.id
    })

    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-V2.5",
        messages=messages,
        temperature=0.01,
        top_p=0.95,
        stream=False,
        tools=tools
    )

    return response.choices[0].message.content

prompt = "How is the weather today in New York?"
print(function_call_playground(prompt))

The model will output:

The weather in New York today is cloudy with a temperature of 20.1°C.