# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case (such as resource IDs,
# strings containing 'EXAMPLE' or 'unique_id', and boolean, number, and enum
# parameters whose values do not match your scenario).
import oci
# Create a default config using the DEFAULT profile in the default location
# Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more info
config = oci.config.from_file()
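# If your credentials live in a different file or profile, from_file accepts
# them explicitly; "MY_PROFILE" below is only an illustrative placeholder:
# config = oci.config.from_file(
#     file_location="~/.oci/config", profile_name="MY_PROFILE")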
# Initialize the service client with the default config file
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config)
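# Optionally, a retry strategy can be passed when constructing the client; the
# commented line below is a sketch using the SDK's built-in default strategy:
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY)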
# Send the request to the service. Some parameters are optional; see the API
# documentation for details.
chat_response = generative_ai_inference_client.chat(
    chat_details=oci.generative_ai_inference.models.ChatDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
            serving_type="ON_DEMAND",
            model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
        chat_request=oci.generative_ai_inference.models.GenericChatRequest(
            api_format="GENERIC",
            messages=[
                oci.generative_ai_inference.models.ToolMessage(
                    role="TOOL",
                    content=[
                        oci.generative_ai_inference.models.TextContent(
                            type="TEXT",
                            text="EXAMPLE-text-Value")],
                    tool_call_id="ocid1.test.oc1..<unique_ID>EXAMPLE-toolCallId-Value")],
            reasoning_effort="LOW",
            verbosity="HIGH",
            metadata="EXAMPLE-metadata-Value",
            is_stream=False,
            stream_options=oci.generative_ai_inference.models.StreamOptions(
                is_include_usage=True),
            num_generations=4,
            seed=790,
            is_echo=True,
            top_k=0,
            top_p=0.45642453,
            temperature=0.028240442,
            frequency_penalty=1.1442062,
            presence_penalty=1.4775215,
            stop=["EXAMPLE--Value"],
            log_probs=943,
            max_tokens=181,
            max_completion_tokens=833,
            logit_bias="EXAMPLE-logitBias-Value",
            prediction=oci.generative_ai_inference.models.StaticContent(
                type="CONTENT",
                content=[
                    oci.generative_ai_inference.models.TextContent(
                        type="TEXT",
                        text="EXAMPLE-text-Value")]),
            response_format=oci.generative_ai_inference.models.JsonObjectResponseFormat(
                type="JSON_OBJECT"),
            tool_choice=oci.generative_ai_inference.models.ToolChoiceAuto(
                type="AUTO"),
            is_parallel_tool_calls=True,
            tools=[
                oci.generative_ai_inference.models.FunctionDefinition(
                    type="FUNCTION",
                    name="EXAMPLE-name-Value",
                    description="EXAMPLE-description-Value",
                    parameters="EXAMPLE-parameters-Value")])),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="HFVWUGKMIX2HFEXJSLB7<unique_ID>")
# Get the data from the response
print(chat_response.data)
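# Optional sketch: for a GENERIC-format, non-streaming request like this one,
# the generated text is typically nested under chat_response.choices; this
# assumes the first choice's first content item is a TextContent, so guard
# accordingly in real code.
generic_response = chat_response.data.chat_response
if generic_response.choices and generic_response.choices[0].message.content:
    print(generic_response.choices[0].message.content[0].text)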