# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case, such as resource
# OCIDs, strings containing "EXAMPLE" or "<unique_ID>", and the boolean,
# number, and enum values used below.

import oci

# Create a default config using the DEFAULT profile in the default location
# (~/.oci/config). Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more information.
config = oci.config.from_file()
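
# If your credentials live in a different file or profile, from_file accepts
# both; the path and profile name below are only illustrative.
# config = oci.config.from_file(
#     file_location="~/.oci/config", profile_name="EXAMPLE-profile-Value")
# validate_config raises an error if required keys (such as user, fingerprint,
# key_file, tenancy, and region) are missing or malformed.
oci.config.validate_config(config)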


# Initialize the service client with the default config
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config)
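
# The Generative AI inference service is regional. If the region in your config
# does not host the service, you can point the client at an explicit endpoint
# instead; the Chicago endpoint below is an assumption, so use the endpoint for
# your region. A retry strategy and connection/read timeout can also be supplied.
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config,
#     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
#     retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,
#     timeout=(10, 240))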


# Send the request to the service. Many of the parameters below are optional;
# see the API documentation for details.
generate_text_response = generative_ai_inference_client.generate_text(
    generate_text_details=oci.generative_ai_inference.models.GenerateTextDetails(
        # OCID of the compartment in which to run the inference request
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        # On-demand serving uses a shared, pretrained base model; a dedicated
        # serving mode can target a hosted dedicated AI cluster instead
        serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
            serving_type="ON_DEMAND",
            model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
        inference_request=oci.generative_ai_inference.models.CohereLlmInferenceRequest(
            runtime_type="COHERE",
            prompt="EXAMPLE-prompt-Value",
            is_stream=False,        # set True to stream tokens as they are generated
            num_generations=2,      # number of completions to return for the prompt
            is_echo=False,          # set True to echo the prompt back in the response
            max_tokens=651,         # maximum number of tokens to generate
            temperature=1.8789818,  # higher values give more random output
            top_k=1,                # sample only from the k most likely tokens
            top_p=0.26159215,       # nucleus sampling: cumulative-probability cutoff
            frequency_penalty=0.89666796,  # penalize tokens by how often they already appear
            presence_penalty=0.48836064,   # penalize tokens that have already appeared at all
            stop_sequences=["EXAMPLE--Value"],  # stop generating at any of these strings
            return_likelihoods="NONE",  # NONE, ALL, or GENERATION token likelihoods
            truncate="NONE")),          # how to truncate a prompt that exceeds the context
    # Optional idempotency token so that retries do not create duplicate work
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    # Optional client-supplied ID for request tracing
    opc_request_id="YR0ZNLQNT2LLK34VSCEJ<unique_ID>")

# Print the data from the response
print(generate_text_response.data)
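
# The response data is a GenerateTextResult; for a Cohere request the generated
# completions are expected under inference_response.generated_texts. This
# attribute path is an assumption based on the SDK models and may need adjusting
# for your SDK version.
for generated_text in generate_text_response.data.inference_response.generated_texts:
    print(generated_text.text)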