# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace any
# parameter values that do not fit your use case, such as resource IDs,
# strings containing 'EXAMPLE' or 'unique_id', and boolean, number, and
# enum values.

import oci

# Create a default config using DEFAULT profile in default location
# Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more info
config = oci.config.from_file()
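# To load credentials from a non-default profile or config file location,
# pass them explicitly (the path and profile name below are placeholders):
# config = oci.config.from_file(
#     file_location="~/.oci/config", profile_name="DEFAULT")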


# Initialize service client with default config file
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config)
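# The Generative AI inference endpoint is regional. If your model is hosted in
# a different region than your config's default, pass the endpoint explicitly
# (the URL below is an example for us-chicago-1):
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config,
#     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com")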


# Send the request to the service. Some parameters are optional; see the API
# documentation for details.
chat_response = generative_ai_inference_client.chat(
    chat_details=oci.generative_ai_inference.models.ChatDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
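        # On-demand serving references a pretrained base model by OCID; a
        # model hosted on a dedicated AI cluster would use DedicatedServingMode
        # with its endpoint OCID instead.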
        serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
            serving_type="ON_DEMAND",
            model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
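        # GenericChatRequest is the request shape for GENERIC-format models;
        # Cohere-hosted models use CohereChatRequest with api_format="COHERE".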
        chat_request=oci.generative_ai_inference.models.GenericChatRequest(
            api_format="GENERIC",
            messages=[
                oci.generative_ai_inference.models.UserMessage(
                    role="USER",
                    content=[
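                        # The image URL is typically a base64 data URI or a
                        # URL the service can reach; a TextContent entry with
                        # the text prompt usually accompanies the image.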
                        oci.generative_ai_inference.models.ImageContent(
                            type="IMAGE",
                            image_url=oci.generative_ai_inference.models.ImageUrl(
                                url="EXAMPLE-url-Value",
                                detail="AUTO"))],
                    name="EXAMPLE-name-Value")],
            is_stream=False,
            num_generations=2,
            seed=592,
            is_echo=True,
            top_k=0,
            top_p=0.22750199,
            temperature=0.7,  # sampling temperature; typical values fall between 0 and 1
            frequency_penalty=1.7431623,
            presence_penalty=1.7700758,
            stop=["EXAMPLE--Value"],
            log_probs=3,  # number of most-likely tokens to return log probabilities for
            max_tokens=423,
            logit_bias={},  # optional map of token IDs to bias values; left empty here
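            # REQUIRED forces the model to call one of the supplied tools.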
            tool_choice=oci.generative_ai_inference.models.ToolChoiceRequired(
                type="REQUIRED"),
            tools=[
                oci.generative_ai_inference.models.FunctionDefinition(
                    type="FUNCTION",
                    name="EXAMPLE-name-Value",
                    description="EXAMPLE-description-Value",
                    # parameters takes a JSON Schema object describing the
                    # function's arguments; a minimal placeholder is used here.
                    parameters={"type": "object", "properties": {}})])),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="MDRFKG5ZIOPDISFY0KDQ<unique_ID>")

# Print the full response payload
print(chat_response.data)
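
# For a GENERIC-format request, the generated text can typically be read from
# the choices on the nested chat_response (the attribute names below assume
# the GenericChatResponse / TextContent response shape):
for choice in chat_response.data.chat_response.choices:
    for content in choice.message.content:
        print(content.text)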
