Cancel a run
Deprecated
beta.threads.runs.cancel(run_id: str, **kwargs: RunCancelParams) -> Run
POST /threads/{thread_id}/runs/{run_id}/cancel
Cancels a run that is in_progress.
Parameters
thread_id: str
run_id: str
Returns
Run
Cancel a run
import os
from openai import OpenAI

client = OpenAI(
    api_key=os.environ.get("OPENAI_API_KEY"),  # This is the default and can be omitted
)
run = client.beta.threads.runs.cancel(
    run_id="run_id",
    thread_id="thread_id",
)
print(run.id)
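Cancellation is asynchronous: the run returned by cancel usually passes through a cancelling status before settling on cancelled. The sketch below polls for that transition; it assumes the companion beta.threads.runs.retrieve method accepts run_id and thread_id the same way cancel does above, and the status names in the loop are illustrative rather than exhaustive.

import os
import time

from openai import OpenAI

client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

# Ask the API to cancel an in_progress run.
run = client.beta.threads.runs.cancel(
    run_id="run_id",
    thread_id="thread_id",
)

# Poll until the run leaves its transient states.
while run.status in ("queued", "in_progress", "cancelling"):
    time.sleep(1)
    run = client.beta.threads.runs.retrieve(
        run_id=run.id,
        thread_id="thread_id",
    )

print(run.status)        # e.g. "cancelled"
print(run.cancelled_at)  # Unix timestamp once cancellation has completed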
"id": "id",
"assistant_id": "assistant_id",
"cancelled_at": 0,
"completed_at": 0,
"created_at": 0,
"expires_at": 0,
"failed_at": 0,
"incomplete_details": {
"reason": "max_completion_tokens"
},
"instructions": "instructions",
"last_error": {
"code": "server_error",
"message": "message"
},
"max_completion_tokens": 256,
"max_prompt_tokens": 256,
"metadata": {
"foo": "string"
},
"model": "model",
"object": "thread.run",
"parallel_tool_calls": true,
"required_action": {
"submit_tool_outputs": {
"tool_calls": [
{
"id": "id",
"function": {
"arguments": "arguments",
"name": "name"
},
"type": "function"
}
]
},
"type": "submit_tool_outputs"
},
"response_format": "auto",
"started_at": 0,
"status": "queued",
"thread_id": "thread_id",
"tool_choice": "none",
"tools": [
{
"type": "code_interpreter"
}
],
"truncation_strategy": {
"type": "auto",
"last_messages": 1
},
"usage": {
"completion_tokens": 0,
"prompt_tokens": 0,
"total_tokens": 0
},
"temperature": 0,
"top_p": 0
}Returns Examples
{
  "id": "id",
  "assistant_id": "assistant_id",
  "cancelled_at": 0,
  "completed_at": 0,
  "created_at": 0,
  "expires_at": 0,
  "failed_at": 0,
  "incomplete_details": {
    "reason": "max_completion_tokens"
  },
  "instructions": "instructions",
  "last_error": {
    "code": "server_error",
    "message": "message"
  },
  "max_completion_tokens": 256,
  "max_prompt_tokens": 256,
  "metadata": {
    "foo": "string"
  },
  "model": "model",
  "object": "thread.run",
  "parallel_tool_calls": true,
  "required_action": {
    "submit_tool_outputs": {
      "tool_calls": [
        {
          "id": "id",
          "function": {
            "arguments": "arguments",
            "name": "name"
          },
          "type": "function"
        }
      ]
    },
    "type": "submit_tool_outputs"
  },
  "response_format": "auto",
  "started_at": 0,
  "status": "queued",
  "thread_id": "thread_id",
  "tool_choice": "none",
  "tools": [
    {
      "type": "code_interpreter"
    }
  ],
  "truncation_strategy": {
    "type": "auto",
    "last_messages": 1
  },
  "usage": {
    "completion_tokens": 0,
    "prompt_tokens": 0,
    "total_tokens": 0
  },
  "temperature": 0,
  "top_p": 0
}
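These are the fields most relevant after a cancellation request. A minimal sketch of reading them off the returned Run object follows; it reuses the run variable from the example above, and the branches shown only fire when the corresponding field is populated.

# Inspect the Run returned by cancel() (or a later retrieve) once it has settled.
if run.status == "cancelled":
    # cancelled_at is a Unix timestamp recorded when cancellation completed.
    print(f"Run {run.id} cancelled at {run.cancelled_at}")

# required_action is present when the run stopped to request tool outputs.
if run.required_action is not None:
    for call in run.required_action.submit_tool_outputs.tool_calls:
        print(call.id, call.function.name, call.function.arguments)

# last_error is only populated for failed runs.
if run.last_error is not None:
    print(run.last_error.code, run.last_error.message)

# Token accounting, when the API has reported it.
if run.usage is not None:
    print(run.usage.total_tokens)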