# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case, such as resource
# OCIDs, strings containing "EXAMPLE" or "unique_id", and boolean, number,
# and enum parameters.

import oci

# Create a default config using the DEFAULT profile in the default location
# (~/.oci/config). Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more information.
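#
# The config file in the default location is expected to look roughly like the
# sketch below; every value here is a placeholder for illustration only:
#
# [DEFAULT]
# user=ocid1.user.oc1..<unique_ID>
# fingerprint=<api_key_fingerprint>
# tenancy=ocid1.tenancy.oc1..<unique_ID>
# region=us-ashburn-1
# key_file=~/.oci/oci_api_key.pem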
config = oci.config.from_file()


# Initialize the service client with the default config file
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config)
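
# If this code runs on an OCI compute instance, instance principal authentication
# can be used instead of a config file (this assumes a suitable dynamic group and
# policy already exist); a minimal alternative sketch:
#
# signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config={}, signer=signer)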


# Send the request to the service. Some of the parameters below are optional;
# see the API doc for more info.
chat_response = generative_ai_inference_client.chat(
    chat_details=oci.generative_ai_inference.models.ChatDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        serving_mode=oci.generative_ai_inference.models.DedicatedServingMode(
            serving_type="DEDICATED",
            endpoint_id="ocid1.test.oc1..<unique_ID>EXAMPLE-endpointId-Value"),
        chat_request=oci.generative_ai_inference.models.GenericChatRequest(
            api_format="GENERIC",
            messages=[
                oci.generative_ai_inference.models.SystemMessage(
                    role="SYSTEM",
                    content=[
                        oci.generative_ai_inference.models.TextContent(
                            type="TEXT",
                            text="EXAMPLE-text-Value")],
                    name="EXAMPLE-name-Value")],
            # With is_stream=True the response is returned as a stream of
            # server-sent events; keep it False so the plain print below works
            is_stream=False,
            num_generations=2,
            is_echo=False,
            top_k=0,
            top_p=0.54156446,
            temperature=0.7,  # sampling temperature; typical values lie between 0 and 1
            frequency_penalty=1.2680095,
            presence_penalty=1.6007161,
            stop=["EXAMPLE-stopSequence-Value"],
            log_probs=4,  # number of most-likely tokens to return log probabilities for
            max_tokens=752,
            # logit_bias maps token IDs to bias values (for example, -100 to 100)
            logit_bias={"EXAMPLE-tokenId": -10})),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="FSCOC5TVRYJCP4ZHLKVC<unique_ID>")
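
# Note: the request above targets a dedicated AI cluster endpoint. For models
# hosted on shared infrastructure, an on-demand serving mode could be used
# instead (the model OCID below is a placeholder):
#
# serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
#     serving_type="ON_DEMAND",
#     model_id="ocid1.generativeaimodel.oc1..<unique_ID>")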

# Print the data from the response
print(chat_response.data)
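
# The structure of chat_response.data depends on the request's api_format. For a
# GENERIC, non-streaming request, the generated text can typically be reached as
# sketched below (the attribute path is assumed from the generic chat response
# model and may need adjusting):
#
# for choice in chat_response.data.chat_response.choices:
#     print(choice.message.content[0].text)
#
# If is_stream were set to True, the response would instead arrive as server-sent
# events, consumed roughly like:
#
# import json
# for event in chat_response.data.events():
#     print(json.loads(event.data))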