# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case (such as resource
# IDs, strings containing 'EXAMPLE' or 'unique_id', and boolean, number,
# and enum parameters).

require 'oci'

# Create a default config using the DEFAULT profile in the default location
# Refer to https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File for more info
config = OCI::ConfigFileLoader.load_config
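
# A minimal sketch of loading a non-default configuration, assuming you keep a
# profile other than DEFAULT. The path and profile name below are hypothetical
# placeholders, not values from this sample; uncomment and adjust as needed.
# config = OCI::ConfigFileLoader.load_config(
#   config_file_location: '~/.oci/config',
#   profile_name: 'EXAMPLE-profile-Value'
# )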

# Initialize service client with default config file
generative_ai_inference_client =
  OCI::GenerativeAiInference::GenerativeAiInferenceClient.new(config: config)

# Send the request to the service. Some parameters are optional; see the API doc for details.
generate_text_response =
  generative_ai_inference_client.generate_text(
    OCI::GenerativeAiInference::Models::GenerateTextDetails.new(
      compartment_id: 'ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value',
      serving_mode:
        OCI::GenerativeAiInference::Models::OnDemandServingMode.new(
          serving_type: 'ON_DEMAND',
          model_id: 'ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value'
        ),
      inference_request:
        OCI::GenerativeAiInference::Models::CohereLlmInferenceRequest.new(
          runtime_type: 'COHERE',
          prompt: 'EXAMPLE-prompt-Value',
          is_stream: false,
          num_generations: 2,
          is_echo: false,
          max_tokens: 651,
          temperature: 1.8789818,
          top_k: 1,
          top_p: 0.26159215,
          frequency_penalty: 0.89666796,
          presence_penalty: 0.48836064,
          stop_sequences: %w[EXAMPLE--Value],
          return_likelihoods: 'NONE',
          truncate: 'NONE'
        )
    )
  )

# Get the data from the response
puts generate_text_response.data
