Description
Confirm this is an issue with the Python library and not an underlying OpenAI API
- This is an issue with the Python library
Describe the bug
With the gpt-5 model, the Chat Completions interface rejects custom tools that use a grammar format.
I have checked several times that my request matches the API reference: https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools-custom-tool-custom-format
The response is: Error code: 400 - {'error': {'message': 'Invalid param', 'type': 'invalid_request_error', 'param': 'c44c810b-15b6-4c62-8e6d-1b8ee2ceece8', 'code': 'param_error'}}
The error does not say which field is invalid or how to fix it. Please advise on the correct way to pass a custom tool with a grammar format.
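For reference, here is a sketch (not part of the failing repro) of how the error can be caught with its full body printed, in case the param UUID maps to something more specific; it assumes the custom_tool_ebnf_grammar_1 helper defined in the snippets below:

from openai import OpenAI, BadRequestError

client = OpenAI()

try:
    client.chat.completions.create(
        model="gpt-5",
        messages=[{"role": "user", "content": "use the ebnf_grammar tool to add four plus four."}],
        tools=[custom_tool_ebnf_grammar_1()],  # defined in the code snippets below
        tool_choice="required",
    )
except BadRequestError as exc:
    # exc.body holds the parsed error payload; exc.response is the raw httpx.Response.
    print(exc.status_code)
    print(exc.body)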
To Reproduce
Run the test functions in the code snippets below.
Code snippets
definition = """
start: expr
expr: term (SP ADD SP term)* -> add
| term
term: factor (SP MUL SP factor)* -> mul
| factor
factor: INT
SP: " "
ADD: "+"
MUL: "*"
%import common.INT
"""
ebnf_description = "Creates valid Lark grammar."
def custom_tool_ebnf_grammar_1():
    # Variant 1: syntax/definition nested inside a "grammar" object under "format".
    return {
        "type": "custom",
        "custom": {
            "name": "ebnf_grammar",
            "description": ebnf_description,
            "format": {
                "type": "grammar",
                "grammar": {
                    "syntax": "lark",
                    "definition": definition,
                },
            },
        },
    }
def custom_tool_ebnf_grammar_2():
    # Variant 2: syntax/definition placed directly under "format", no nested "grammar" object.
    return {
        "type": "custom",
        "custom": {
            "name": "ebnf_grammar",
            "description": ebnf_description,
            "format": {
                "type": "grammar",
                "syntax": "lark",
                "definition": definition,
            },
        },
    }
def custom_tool_ebnf_grammar_3():
    # Variant 3: no "custom" wrapper; name/description/format at the top level of the tool.
    return {
        "type": "custom",
        "name": "ebnf_grammar",
        "description": ebnf_description,
        "format": {
            "type": "grammar",
            "syntax": "lark",
            "definition": definition,
        },
    }
client = OpenAI()
ebnf_query = "use the ebnf_grammar tool to add four plus four."
def test_completion_chat_ebnf_grammar_1():
    print("===== start test_completion_chat_ebnf_grammar_1 ===== ")
    response = client.chat.completions.create(
        model="gpt-5",
        messages=[{"role": "user", "content": ebnf_query}],
        tools=[custom_tool_ebnf_grammar_1()],
        parallel_tool_calls=False,
        tool_choice="required",
    )
    print(response.choices[0].message.content)
    print("===== end test_completion_chat_ebnf_grammar_1 ===== ")

def test_completion_chat_ebnf_grammar_2():
    print("===== start test_completion_chat_ebnf_grammar_2 ===== ")
    response = client.chat.completions.create(
        model="gpt-5",
        messages=[{"role": "user", "content": ebnf_query}],
        tools=[custom_tool_ebnf_grammar_2()],
        parallel_tool_calls=False,
        tool_choice="required",
    )
    print(response.choices[0].message.content)
    print("===== end test_completion_chat_ebnf_grammar_2 ===== ")

def test_completion_chat_ebnf_grammar_3():
    print("===== start test_completion_chat_ebnf_grammar_3 ===== ")
    response = client.chat.completions.create(
        model="gpt-5",
        messages=[{"role": "user", "content": ebnf_query}],
        tools=[custom_tool_ebnf_grammar_3()],
        parallel_tool_calls=False,
        tool_choice="required",
    )
    print(response.choices[0].message.content)
    print("===== end test_completion_chat_ebnf_grammar_3 ===== ")
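For comparison, this is a sketch of the shape the custom-tools documentation describes for the Responses API, where the grammar fields sit directly on the tool object; I have not verified it here, so treat the field layout as an assumption:

def test_responses_ebnf_grammar():
    # Sketch only: Responses API custom tool with a Lark grammar format
    # (field layout assumed from the custom tools guide, not verified here).
    response = client.responses.create(
        model="gpt-5",
        input=ebnf_query,
        tools=[
            {
                "type": "custom",
                "name": "ebnf_grammar",
                "description": ebnf_description,
                "format": {
                    "type": "grammar",
                    "syntax": "lark",
                    "definition": definition,
                },
            }
        ],
    )
    print(response.output)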
OS
macOS
Python version
Python 3.9
Library version
openai 1.108.1