Skip to content

Commit 1d510be

Browse files
fix: [Many APIs] fix typings for IAM methods (#4459)
* fix: fix typings for IAM methods docs: fixed links in the generated Markdown documentation PiperOrigin-RevId: 551610576 Source-Link: googleapis/googleapis@73b1313 Source-Link: googleapis/googleapis-gen@8bec066 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1kYXRhLy5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwaS1hcGlrZXlzLy5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwaS1zZXJ2aWNlY29udHJvbC8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwaS1zZXJ2aWNlbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwaS1zZXJ2aWNldXNhZ2UvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcGVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFyZWExMjAtdGFibGVzLy5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFjY2Vzc2FwcHJvdmFsLy5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFkdmlzb3J5bm90aWZpY2F0aW9ucy8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFsbG95ZGIvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWdhdGV3YXkvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: 
eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWdlZWNvbm5lY3QvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc2V0Ly5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc3VyZWR3b3JrbG9hZHMvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhcmVtZXRhbHNvbHV0aW9uLy5Pd2xCb3QueWFtbCIsImgiOiI4YmVjMDY2NDkyYTZkYTI4NTViMWI4Y2U1NjI2NjRjMGE2YjMwYjAxIn0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwY29ubmVjdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwY29ubmVjdG9ycy8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwZ2F0ZXdheXMvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtY2xpZW50Y29ubmVjdG9yc2VydmljZXMvLk93bEJvdC55YW1sIiwiaCI6IjhiZWMwNjY0OTJhNmRhMjg1NWIxYjhjZTU2MjY2NGMwYTZiMzBiMDEifQ== * chore!: update to Node 14 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com> Co-authored-by: Sofia Leon <[email protected]> Co-authored-by: sofisl <[email protected]>
1 parent c32965c commit 1d510be

196 files changed

Lines changed: 17747 additions & 2410 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

packages/google-ai-generativelanguage/package.json

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -44,29 +44,30 @@
4444
"prelint": "cd samples; npm link ../; npm i"
4545
},
4646
"dependencies": {
47-
"google-gax": "^3.5.8"
47+
"google-gax": "^4.0.3"
4848
},
4949
"devDependencies": {
5050
"@types/mocha": "^9.0.0",
51-
"@types/node": "^18.0.0",
51+
"@types/node": "^20.4.5",
5252
"@types/sinon": "^10.0.0",
5353
"c8": "^7.3.5",
54-
"gts": "^3.1.0",
54+
"gapic-tools": "^0.1.8",
55+
"gts": "^5.0.0",
5556
"jsdoc": "^4.0.0",
5657
"jsdoc-fresh": "^2.0.0",
5758
"jsdoc-region-tag": "^2.0.0",
58-
"long": "^5.2.3",
5959
"linkinator": "4.1.2",
60+
"long": "^5.2.3",
6061
"mocha": "^9.2.2",
6162
"null-loader": "^4.0.1",
6263
"pack-n-play": "^1.0.0-2",
6364
"sinon": "^15.0.0",
6465
"ts-loader": "^9.0.0",
65-
"typescript": "^4.6.4",
66+
"typescript": "^5.1.6",
6667
"webpack": "^5.9.0",
6768
"webpack-cli": "^5.0.0"
6869
},
6970
"engines": {
70-
"node": ">=12.0.0"
71+
"node": ">=14.0.0"
7172
}
7273
}
Lines changed: 319 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,319 @@
1+
{
2+
"clientLibrary": {
3+
"name": "nodejs-generativelanguage",
4+
"version": "0.2.1",
5+
"language": "TYPESCRIPT",
6+
"apis": [
7+
{
8+
"id": "google.ai.generativelanguage.v1beta2",
9+
"version": "v1beta2"
10+
}
11+
]
12+
},
13+
"snippets": [
14+
{
15+
"regionTag": "generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_async",
16+
"title": "DiscussService generateMessage Sample",
17+
"origin": "API_DEFINITION",
18+
"description": " Generates a response from the model given an input `MessagePrompt`.",
19+
"canonical": true,
20+
"file": "discuss_service.generate_message.js",
21+
"language": "JAVASCRIPT",
22+
"segments": [
23+
{
24+
"start": 25,
25+
"end": 90,
26+
"type": "FULL"
27+
}
28+
],
29+
"clientMethod": {
30+
"shortName": "GenerateMessage",
31+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService.GenerateMessage",
32+
"async": true,
33+
"parameters": [
34+
{
35+
"name": "model",
36+
"type": "TYPE_STRING"
37+
},
38+
{
39+
"name": "prompt",
40+
"type": ".google.ai.generativelanguage.v1beta2.MessagePrompt"
41+
},
42+
{
43+
"name": "temperature",
44+
"type": "TYPE_FLOAT"
45+
},
46+
{
47+
"name": "candidate_count",
48+
"type": "TYPE_INT32"
49+
},
50+
{
51+
"name": "top_p",
52+
"type": "TYPE_FLOAT"
53+
},
54+
{
55+
"name": "top_k",
56+
"type": "TYPE_INT32"
57+
}
58+
],
59+
"resultType": ".google.ai.generativelanguage.v1beta2.GenerateMessageResponse",
60+
"client": {
61+
"shortName": "DiscussServiceClient",
62+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussServiceClient"
63+
},
64+
"method": {
65+
"shortName": "GenerateMessage",
66+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService.GenerateMessage",
67+
"service": {
68+
"shortName": "DiscussService",
69+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService"
70+
}
71+
}
72+
}
73+
},
74+
{
75+
"regionTag": "generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_async",
76+
"title": "DiscussService countMessageTokens Sample",
77+
"origin": "API_DEFINITION",
78+
"description": " Runs a model's tokenizer on a string and returns the token count.",
79+
"canonical": true,
80+
"file": "discuss_service.count_message_tokens.js",
81+
"language": "JAVASCRIPT",
82+
"segments": [
83+
{
84+
"start": 25,
85+
"end": 61,
86+
"type": "FULL"
87+
}
88+
],
89+
"clientMethod": {
90+
"shortName": "CountMessageTokens",
91+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService.CountMessageTokens",
92+
"async": true,
93+
"parameters": [
94+
{
95+
"name": "model",
96+
"type": "TYPE_STRING"
97+
},
98+
{
99+
"name": "prompt",
100+
"type": ".google.ai.generativelanguage.v1beta2.MessagePrompt"
101+
}
102+
],
103+
"resultType": ".google.ai.generativelanguage.v1beta2.CountMessageTokensResponse",
104+
"client": {
105+
"shortName": "DiscussServiceClient",
106+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussServiceClient"
107+
},
108+
"method": {
109+
"shortName": "CountMessageTokens",
110+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService.CountMessageTokens",
111+
"service": {
112+
"shortName": "DiscussService",
113+
"fullName": "google.ai.generativelanguage.v1beta2.DiscussService"
114+
}
115+
}
116+
}
117+
},
118+
{
119+
"regionTag": "generativelanguage_v1beta2_generated_ModelService_GetModel_async",
120+
"title": "ModelService getModel Sample",
121+
"origin": "API_DEFINITION",
122+
"description": " Gets information about a specific Model.",
123+
"canonical": true,
124+
"file": "model_service.get_model.js",
125+
"language": "JAVASCRIPT",
126+
"segments": [
127+
{
128+
"start": 25,
129+
"end": 55,
130+
"type": "FULL"
131+
}
132+
],
133+
"clientMethod": {
134+
"shortName": "GetModel",
135+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService.GetModel",
136+
"async": true,
137+
"parameters": [
138+
{
139+
"name": "name",
140+
"type": "TYPE_STRING"
141+
}
142+
],
143+
"resultType": ".google.ai.generativelanguage.v1beta2.Model",
144+
"client": {
145+
"shortName": "ModelServiceClient",
146+
"fullName": "google.ai.generativelanguage.v1beta2.ModelServiceClient"
147+
},
148+
"method": {
149+
"shortName": "GetModel",
150+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService.GetModel",
151+
"service": {
152+
"shortName": "ModelService",
153+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService"
154+
}
155+
}
156+
}
157+
},
158+
{
159+
"regionTag": "generativelanguage_v1beta2_generated_ModelService_ListModels_async",
160+
"title": "ModelService listModels Sample",
161+
"origin": "API_DEFINITION",
162+
"description": " Lists models available through the API.",
163+
"canonical": true,
164+
"file": "model_service.list_models.js",
165+
"language": "JAVASCRIPT",
166+
"segments": [
167+
{
168+
"start": 25,
169+
"end": 66,
170+
"type": "FULL"
171+
}
172+
],
173+
"clientMethod": {
174+
"shortName": "ListModels",
175+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService.ListModels",
176+
"async": true,
177+
"parameters": [
178+
{
179+
"name": "page_size",
180+
"type": "TYPE_INT32"
181+
},
182+
{
183+
"name": "page_token",
184+
"type": "TYPE_STRING"
185+
}
186+
],
187+
"resultType": ".google.ai.generativelanguage.v1beta2.ListModelsResponse",
188+
"client": {
189+
"shortName": "ModelServiceClient",
190+
"fullName": "google.ai.generativelanguage.v1beta2.ModelServiceClient"
191+
},
192+
"method": {
193+
"shortName": "ListModels",
194+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService.ListModels",
195+
"service": {
196+
"shortName": "ModelService",
197+
"fullName": "google.ai.generativelanguage.v1beta2.ModelService"
198+
}
199+
}
200+
}
201+
},
202+
{
203+
"regionTag": "generativelanguage_v1beta2_generated_TextService_GenerateText_async",
204+
"title": "TextService generateText Sample",
205+
"origin": "API_DEFINITION",
206+
"description": " Generates a response from the model given an input message.",
207+
"canonical": true,
208+
"file": "text_service.generate_text.js",
209+
"language": "JAVASCRIPT",
210+
"segments": [
211+
{
212+
"start": 25,
213+
"end": 119,
214+
"type": "FULL"
215+
}
216+
],
217+
"clientMethod": {
218+
"shortName": "GenerateText",
219+
"fullName": "google.ai.generativelanguage.v1beta2.TextService.GenerateText",
220+
"async": true,
221+
"parameters": [
222+
{
223+
"name": "model",
224+
"type": "TYPE_STRING"
225+
},
226+
{
227+
"name": "prompt",
228+
"type": ".google.ai.generativelanguage.v1beta2.TextPrompt"
229+
},
230+
{
231+
"name": "temperature",
232+
"type": "TYPE_FLOAT"
233+
},
234+
{
235+
"name": "candidate_count",
236+
"type": "TYPE_INT32"
237+
},
238+
{
239+
"name": "max_output_tokens",
240+
"type": "TYPE_INT32"
241+
},
242+
{
243+
"name": "top_p",
244+
"type": "TYPE_FLOAT"
245+
},
246+
{
247+
"name": "top_k",
248+
"type": "TYPE_INT32"
249+
},
250+
{
251+
"name": "safety_settings",
252+
"type": "TYPE_MESSAGE[]"
253+
},
254+
{
255+
"name": "stop_sequences",
256+
"type": "TYPE_STRING[]"
257+
}
258+
],
259+
"resultType": ".google.ai.generativelanguage.v1beta2.GenerateTextResponse",
260+
"client": {
261+
"shortName": "TextServiceClient",
262+
"fullName": "google.ai.generativelanguage.v1beta2.TextServiceClient"
263+
},
264+
"method": {
265+
"shortName": "GenerateText",
266+
"fullName": "google.ai.generativelanguage.v1beta2.TextService.GenerateText",
267+
"service": {
268+
"shortName": "TextService",
269+
"fullName": "google.ai.generativelanguage.v1beta2.TextService"
270+
}
271+
}
272+
}
273+
},
274+
{
275+
"regionTag": "generativelanguage_v1beta2_generated_TextService_EmbedText_async",
276+
"title": "TextService embedText Sample",
277+
"origin": "API_DEFINITION",
278+
"description": " Generates an embedding from the model given an input message.",
279+
"canonical": true,
280+
"file": "text_service.embed_text.js",
281+
"language": "JAVASCRIPT",
282+
"segments": [
283+
{
284+
"start": 25,
285+
"end": 59,
286+
"type": "FULL"
287+
}
288+
],
289+
"clientMethod": {
290+
"shortName": "EmbedText",
291+
"fullName": "google.ai.generativelanguage.v1beta2.TextService.EmbedText",
292+
"async": true,
293+
"parameters": [
294+
{
295+
"name": "model",
296+
"type": "TYPE_STRING"
297+
},
298+
{
299+
"name": "text",
300+
"type": "TYPE_STRING"
301+
}
302+
],
303+
"resultType": ".google.ai.generativelanguage.v1beta2.EmbedTextResponse",
304+
"client": {
305+
"shortName": "TextServiceClient",
306+
"fullName": "google.ai.generativelanguage.v1beta2.TextServiceClient"
307+
},
308+
"method": {
309+
"shortName": "EmbedText",
310+
"fullName": "google.ai.generativelanguage.v1beta2.TextService.EmbedText",
311+
"service": {
312+
"shortName": "TextService",
313+
"fullName": "google.ai.generativelanguage.v1beta2.TextService"
314+
}
315+
}
316+
}
317+
}
318+
]
319+
}

packages/google-ai-generativelanguage/samples/generated/v1beta2/text_service.generate_text.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -90,13 +90,13 @@ function main(model, prompt) {
9090
* `SafetyCategory` provided in the list, the API will use the default safety
9191
* setting for that category.
9292
*/
93-
// const safetySettings = 1234
93+
// const safetySettings = [1,2,3,4]
9494
/**
9595
* The set of character sequences (up to 5) that will stop output generation.
9696
* If specified, the API will stop at the first appearance of a stop
9797
* sequence. The stop sequence will not be included as part of the response.
9898
*/
99-
// const stopSequences = 'abc123'
99+
// const stopSequences = ['abc','def']
100100

101101
// Imports the Generativelanguage library
102102
const {TextServiceClient} = require('@google-ai/generativelanguage').v1beta2;

packages/google-ai-generativelanguage/samples/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
"license": "Apache-2.0",
55
"author": "Google LLC",
66
"engines": {
7-
"node": ">=12.0.0"
7+
"node": ">=14.0.0"
88
},
99
"files": [
1010
"*.js"
@@ -21,4 +21,4 @@
2121
"chai": "^4.2.0",
2222
"mocha": "^8.0.0"
2323
}
24-
}
24+
}

0 commit comments

Comments
 (0)