
Commit 8eb37f3

Releasing version 2.102.1
2 parents faf1c6a + a16a644 commit 8eb37f3


341 files changed, +7981 −799 lines changed



CHANGELOG.md

+361-252
Large diffs are not rendered by default.
@@ -0,0 +1,73 @@
/**
 * Copyright (c) 2020, 2021 Oracle and/or its affiliates. All rights reserved.
 * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
 */

/*
 * This example demonstrates how to get a response for a given conversation from a Cohere LLM.
 * In order to run this example, the code must be run from an Oracle Cloud instance.
 */

import { GenerativeAiInferenceClient, models, requests } from "oci-generativeaiinference";
import {
  SessionAuthDetailProvider,
  NoRetryConfigurationDetails
} from "oci-common";

// TODO: Update the config profile name and use a compartmentId whose policies grant permission to use the Generative AI service.
const CONFIG_LOCATION = "~/.oci/config";
const CONFIG_PROFILE = "DEFAULT";
const COMPARTMENT_ID = ""; // Fill in with a compartment Id that has access to the Generative AI API

(async () => {
  // Configure the AuthenticationDetailsProvider. It assumes there is a default OCI config file "~/.oci/config"
  // and a profile in that config with the name defined in the CONFIG_PROFILE variable.
  const provider = new SessionAuthDetailProvider(CONFIG_LOCATION, CONFIG_PROFILE);
  provider.setRegion("us-chicago-1");

  const client = new GenerativeAiInferenceClient({
    authenticationDetailsProvider: provider
  });

  // On-Demand Serving Mode
  // Check the list of pretrained Cohere chat models available in each region:
  // https://docs.oracle.com/en-us/iaas/Content/generative-ai/pretrained-models.htm#pretrained-models
  const servingMode: models.OnDemandServingMode = {
    modelId: "cohere.command-r-08-2024",
    servingType: "ON_DEMAND",
  };

  // Dedicated Serving Mode
  // const servingMode: models.DedicatedServingMode = {
  //   endpointId: "", // Fill in with an endpoint Id if you have an active Dedicated AI Cluster resource
  //   servingType: "DEDICATED",
  // };

  // Chat Details
  const chatRequest: requests.ChatRequest = {
    chatDetails: {
      compartmentId: COMPARTMENT_ID,
      servingMode: servingMode,
      chatRequest: {
        message:
          "As a corporate vice president, generate an email congratulating a team that has just shipped a new cloud service. Emphasize the great positive impact the new service will have on the productivity of their customers.",
        apiFormat: "COHERE",
        maxTokens: 600,
        temperature: 0,
        frequencyPenalty: 1,
        presencePenalty: 0,
        topP: 0.75,
        topK: 0,
        seed: 0,
      }
    },
    retryConfiguration: NoRetryConfigurationDetails
  };

  const chatResponse = await client.chat(chatRequest);

  // Print the chat response
  console.log("**************************Chat Response**************************");
  console.log(JSON.stringify(chatResponse));
})();
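
Not part of the diff above, but a minimal, hedged sketch of unpacking the reply instead of dumping the whole response as JSON. It assumes that, for apiFormat "COHERE", chatResponse.chatResult.chatResponse is a models.CohereChatResponse with a text field; verify the field names against the installed SDK version.

  // Hedged sketch (assumption: chatResult.chatResponse is a CohereChatResponse for the COHERE api format)
  const cohereReply = chatResponse.chatResult.chatResponse as models.CohereChatResponse;
  console.log(cohereReply.text); // the generated email body only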
@@ -0,0 +1,84 @@
/**
 * Copyright (c) 2020, 2021 Oracle and/or its affiliates. All rights reserved.
 * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
 */

/*
 * This example demonstrates how to get a response for a given conversation from a Meta Llama LLM.
 * In order to run this example, the code must be run from an Oracle Cloud instance.
 */

import { GenerativeAiInferenceClient, models, requests } from "oci-generativeaiinference";
import {
  SessionAuthDetailProvider,
  NoRetryConfigurationDetails
} from "oci-common";

// TODO: Update the config profile name and use a compartmentId whose policies grant permission to use the Generative AI service.
const CONFIG_LOCATION = "~/.oci/config";
const CONFIG_PROFILE = "DEFAULT";
const COMPARTMENT_ID = ""; // Fill in with a compartment Id that has access to the Generative AI API

(async () => {
  // Configure the AuthenticationDetailsProvider. It assumes there is a default OCI config file "~/.oci/config"
  // and a profile in that config with the name defined in the CONFIG_PROFILE variable.
  const provider = new SessionAuthDetailProvider(CONFIG_LOCATION, CONFIG_PROFILE);
  provider.setRegion("us-chicago-1");

  const client = new GenerativeAiInferenceClient({
    authenticationDetailsProvider: provider
  });

  // On-Demand Serving Mode
  // Check the list of pretrained Meta Llama chat models available in each region:
  // https://docs.oracle.com/en-us/iaas/Content/generative-ai/pretrained-models.htm#pretrained-models
  const servingMode: models.OnDemandServingMode = {
    modelId: "meta.llama-3.2-90b-vision-instruct",
    servingType: "ON_DEMAND",
  };

  // Dedicated Serving Mode
  // const servingMode: models.DedicatedServingMode = {
  //   endpointId: "", // Fill in with an endpoint Id if you have an active Dedicated AI Cluster resource
  //   servingType: "DEDICATED",
  // };

  // Chat Details
  const chatRequest: requests.ChatRequest = {
    chatDetails: {
      compartmentId: COMPARTMENT_ID,
      servingMode: servingMode,
      chatRequest: {
        messages: [
          {
            role: "USER",
            content: [
              {
                type: "TEXT",
                // @ts-ignore
                text:
                  "As a corporate vice president, generate an email congratulating a team that has just shipped a new cloud service. Emphasize the great positive impact the new service will have on the productivity of their customers.",
              }
            ]
          }
        ],
        apiFormat: "GENERIC",
        maxTokens: 600,
        temperature: 0,
        frequencyPenalty: 1,
        presencePenalty: 0,
        topP: 0.75,
        topK: -1,
        seed: 0,
      }
    },
    retryConfiguration: NoRetryConfigurationDetails
  };

  const chatResponse = await client.chat(chatRequest);

  // Print the chat response
  console.log("**************************Chat Response**************************");
  console.log(JSON.stringify(chatResponse));
})();
@@ -0,0 +1,64 @@
/**
 * Copyright (c) 2020, 2021 Oracle and/or its affiliates. All rights reserved.
 * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
 */

/*
 * This example demonstrates how to get an embeddings response for the given inputs from a Cohere embedding model.
 * In order to run this example, the code must be run from an Oracle Cloud instance.
 */

import { GenerativeAiInferenceClient, models, requests } from "oci-generativeaiinference";
import {
  SessionAuthDetailProvider,
  NoRetryConfigurationDetails
} from "oci-common";

// TODO: Update the config profile name and use a compartmentId whose policies grant permission to use the Generative AI service.
const CONFIG_LOCATION = "~/.oci/config";
const CONFIG_PROFILE = "DEFAULT";
const COMPARTMENT_ID = ""; // Fill in with a compartment Id that has access to the Generative AI API

(async () => {
  // Configure the AuthenticationDetailsProvider. It assumes there is a default OCI config file "~/.oci/config"
  // and a profile in that config with the name defined in the CONFIG_PROFILE variable.
  const provider = new SessionAuthDetailProvider(CONFIG_LOCATION, CONFIG_PROFILE);
  provider.setRegion("us-chicago-1");

  const client = new GenerativeAiInferenceClient({
    authenticationDetailsProvider: provider
  });

  // On-Demand Serving Mode
  // Check the list of pretrained Cohere embedding models available in each region:
  // https://docs.oracle.com/en-us/iaas/Content/generative-ai/pretrained-models.htm#pretrained-models
  const servingMode: models.OnDemandServingMode = {
    modelId: "cohere.embed-english-v3.0",
    servingType: "ON_DEMAND",
  };

  // Dedicated Serving Mode
  // const servingMode: models.DedicatedServingMode = {
  //   endpointId: "", // Fill in with an endpoint Id if you have an active Dedicated AI Cluster resource
  //   servingType: "DEDICATED",
  // };

  // Embed Details
  const embedRequest: requests.EmbedTextRequest = {
    embedTextDetails: {
      inputs: [
        "In order to maintain our growth, we need to track our billings to ensure we are charging our customers enough to support our business.",
        " We have a system in place to track our billings and ensure we are billing our customers accurately.",
        " We have a dedicated billing team that is responsible for generating invoices and tracking payments.",
        " Our billing system is integrated with our customer relationship management (CRM) system, which allows us to track our billings and customer interactions in one place.",
        " We use a third-party billing service to help us manage our billings and ensure we are billing our customers correctly.",
        " We are committed to providing our customers with accurate billings and clear explanations of our charges.",
        " Timely and accurate billing is important to our customers, and we strive to provide them with the best possible service.",
        " We are constantly looking for ways to improve our billing process and ensure we are billing our customers fairly.",
        " We are committed to being transparent with our customers about our billing process and how we calculate our charges.",
        " Billing can be a complex process, and we are here to help our customers understand their bills and answer any questions they may have.",
        " We value our customers and want to ensure that they are happy with our billing process and the services we provide."
      ],
      truncate: models.EmbedTextDetails.Truncate.None,
      servingMode: servingMode,
      compartmentId: COMPARTMENT_ID,
    },
    retryConfiguration: NoRetryConfigurationDetails
  };

  const embedResponse = await client.embedText(embedRequest);

  // Print the embed text response
  console.log("**************************Embed Texts Response**************************");
  console.log(JSON.stringify(embedResponse));
})();
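
Again not part of the diff, but a short, hedged sketch of reading the vectors out of the response rather than printing the whole object. It assumes the EmbedTextResponse exposes an embedTextResult whose embeddings field is an array of number arrays, one per input string; check against the SDK version in use.

  // Hedged sketch (assumption: embedResponse.embedTextResult.embeddings is Array<Array<number>>)
  const vectors = embedResponse.embedTextResult.embeddings;
  console.log(`Received ${vectors.length} embeddings of dimension ${vectors[0].length}`);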

lib/accessgovernancecp/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-accessgovernancecp",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Access Governance Cp Service",
   "repository": {
     "type": "git",

lib/adm/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-adm",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Adm Service",
   "repository": {
     "type": "git",

lib/aianomalydetection/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-aianomalydetection",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Ai Anomaly Detection Service",
   "repository": {
     "type": "git",

lib/aidocument/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-aidocument",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Ai Document Service",
   "repository": {
     "type": "git",

lib/ailanguage/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-ailanguage",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Ai Language Service",
   "repository": {
     "type": "git",

lib/aispeech/lib/model/realtime-parameters.ts

+15-1
@@ -64,14 +64,22 @@ export interface RealtimeParameters {
   "languageCode"?: string;
   /**
    * If set to true, the service will not fail connection attempt if it encounters any issues that prevent the loading of all specified user customizations. Any invalid customizations will simply be ignored and connection will continue being established with the default base model and any remaining valid customizations.
-   * If set to false, if the service is unable to load any of the specified customizations, an error detailing why will be returned and the session will end.
+   * If set to false, if the service is unable to load any of the specified customizations, an error detailing why will be returned and the session will end.
    *
    */
   "shouldIgnoreInvalidCustomizations"?: boolean;
   /**
    * Array of customization objects.
    */
   "customizations"?: Array<model.CustomizationInference>;
+  /**
+   * Configure punctuations in the generated transcriptions. Disabled by default.
+   * - NONE: No punctuation in the transcription response
+   * - SPOKEN: Punctuations in response only when verbally spoken
+   * - AUTO: Automatic punctuation in the response, spoken punctuations are disabled
+   *
+   */
+  "punctuation"?: RealtimeParameters.Punctuation;
 }

 export namespace RealtimeParameters {
@@ -87,6 +95,12 @@ export namespace RealtimeParameters {
     Medical = "MEDICAL"
   }

+  export enum Punctuation {
+    None = "NONE",
+    Spoken = "SPOKEN",
+    Auto = "AUTO"
+  }
+
  export function getJsonObj(obj: RealtimeParameters): object {
    const jsonObj = {
      ...obj,
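
A minimal sketch, not part of the diff, of how a caller might set the new punctuation option when building RealtimeParameters. Only fields shown in the diff above are used; the realtime session and client wiring are omitted, and all fields are assumed optional as declared.

  import * as aispeech from "oci-aispeech";

  // Hedged sketch: request automatic punctuation in realtime transcriptions.
  const realtimeParams: aispeech.models.RealtimeParameters = {
    languageCode: "en-US",
    shouldIgnoreInvalidCustomizations: true,
    punctuation: aispeech.models.RealtimeParameters.Punctuation.Auto, // NONE | SPOKEN | AUTO
  };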

lib/aispeech/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-aispeech",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Ai Speech Service",
   "repository": {
     "type": "git",

lib/aivision/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-aivision",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Ai Vision Service",
   "repository": {
     "type": "git",

lib/analytics/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-analytics",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Analytics Service",
   "repository": {
     "type": "git",

lib/announcementsservice/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-announcementsservice",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Announcement Service",
   "repository": {
     "type": "git",

lib/apigateway/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-apigateway",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for API gateway service",
   "repository": {
     "type": "git",

lib/apmconfig/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-apmconfig",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Apm Config Service",
   "repository": {
     "type": "git",

lib/apmcontrolplane/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-apmcontrolplane",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Apm Control Plane Service",
   "repository": {
     "type": "git",

lib/apmsynthetics/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-apmsynthetics",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Apm Synthetics Service",
   "repository": {
     "type": "git",

lib/apmtraces/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-apmtraces",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Apm Traces Service",
   "repository": {
     "type": "git",

lib/appmgmtcontrol/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-appmgmtcontrol",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Appmgmt Control Service",
   "repository": {
     "type": "git",

lib/artifacts/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-artifacts",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Artifacts Service",
   "repository": {
     "type": "git",

lib/audit/package.json

+1-1
@@ -1,6 +1,6 @@
 {
   "name": "oci-audit",
-  "version": "2.102.0",
+  "version": "2.102.1",
   "description": "OCI NodeJS client for Audit Service",
   "repository": {
     "type": "git",
