# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy,
# replace the values of any parameters that do not fit your use case
# (such as resource OCIDs, strings containing "EXAMPLE" or "unique_id",
# and boolean, number, and enum parameters).
import oci
# Create a default config using the DEFAULT profile in the default location
# Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more info
config = oci.config.from_file()
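# If your credentials live in a different file or profile, from_file accepts an
# explicit path and profile name; the values below are illustrative placeholders,
# not part of this sample.
# config = oci.config.from_file(file_location="~/.oci/config", profile_name="DEFAULT")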
# Initialize the service client with the default config
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
config)
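# By default the client derives its endpoint from the region in the config
# profile. If you need to target a specific Generative AI inference endpoint,
# OCI SDK clients also accept an optional service_endpoint argument; the region
# in the URL below is only an illustrative assumption.
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config,
#     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com")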
# Send the request to the service. Some parameters are optional; see the API
# documentation for more info.
generate_text_response = generative_ai_inference_client.generate_text(
    generate_text_details=oci.generative_ai_inference.models.GenerateTextDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
            serving_type="ON_DEMAND",
            model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
        inference_request=oci.generative_ai_inference.models.LlamaLlmInferenceRequest(
            runtime_type="LLAMA",
            prompt="EXAMPLE-prompt-Value",
            # set True to stream tokens back as server-sent events instead of
            # returning a single response
            is_stream=False,
            num_generations=1,
            is_echo=True,  # include the prompt in the generated response
            top_k=0,  # 0 disables top-k sampling
            top_p=0.75,
            temperature=0.7,  # lower values make output more deterministic
            frequency_penalty=0.0,
            presence_penalty=0.0,
            stop=["EXAMPLE--Value"],
            log_probs=2,  # number of top alternative tokens to return log probabilities for
            max_tokens=512)),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="EFO8IEHYWNJWHWOXZNYQ<unique_ID>")
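# If your model runs on a dedicated AI cluster rather than the shared on-demand
# fleet, the serving_mode above can be swapped for a DedicatedServingMode that
# points at your hosting endpoint; the OCID below is a placeholder, not a value
# from this sample.
# serving_mode=oci.generative_ai_inference.models.DedicatedServingMode(
#     serving_type="DEDICATED",
#     endpoint_id="ocid1.generativeaiendpoint.oc1..<unique_ID>")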
# Get the data from the response
print(generate_text_response.data)
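# The structure of response.data depends on the inference request type. For a
# non-streaming Llama-style request, the generated text is nested under the
# inference response; the attribute path below is a sketch based on the
# LlamaLlmInferenceResponse model and may differ in your SDK version.
# for choice in generate_text_response.data.inference_response.choices:
#     print(choice.text)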