
Commit a1c8c72

Fix docstrings
Change-Id: I6020df4e862a4f1d58462a4cd70876a8448293cf
1 parent f48cedc commit a1c8c72

2 files changed: +10 −9 lines


google/generativeai/caching.py

Lines changed: 1 addition & 6 deletions

@@ -210,12 +210,7 @@ def list(
             yield cls._decode_cached_content(cached_content)
 
     def delete(self, client: glm.CachedServiceClient | None = None) -> None:
-        """Deletes `CachedContent` resource.
-
-        Args:
-            name: The resource name referring to the cached content.
-                Format: cachedContents/{id}.
-        """
+        """Deletes `CachedContent` resource."""
         if client is None:
             client = get_default_cache_client()

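The removed Args section described a `name` parameter that `delete()` does not accept: the method deletes the resource the instance already refers to, falling back to the default cache client when none is passed. A minimal usage sketch, assuming a cached content with the hypothetical id "cachedContents/example-id" already exists (the id is invented for illustration):

from google.generativeai import caching

# Sketch only: "cachedContents/example-id" is a hypothetical resource name.
# delete() takes no resource name; it removes the CachedContent this instance
# refers to, using the default cache client unless one is passed explicitly.
cached = caching.CachedContent.get(name="cachedContents/example-id")
cached.delete()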
tests/test_generative_models.py

Lines changed: 9 additions & 3 deletions

@@ -1,6 +1,7 @@
 import collections
 from collections.abc import Iterable
 import copy
+import datetime
 import pathlib
 from typing import Any
 import textwrap

@@ -112,7 +113,6 @@ def setUp(self):
         client_lib._client_manager.clients["generative"] = self.client
         client_lib._client_manager.clients["cache"] = self.client
 
-
     def test_hello(self):
         # Generate text from text prompt
         model = generative_models.GenerativeModel(model_name="gemini-pro")

@@ -338,7 +338,13 @@ def test_stream_prompt_feedback_not_blocked(self):
             dict(testcase_name="test_cached_content_as_id", cached_content="test-cached-content"),
             dict(
                 testcase_name="test_cached_content_as_CachedContent_object",
-                cached_content=caching.CachedContent.get(name="cachedContents/test-cached-content"),
+                cached_content=caching.CachedContent(
+                    name="cachedContents/test-cached-content",
+                    model="models/gemini-1.0-pro-001",
+                    create_time=datetime.datetime.now(),
+                    update_time=datetime.datetime.now(),
+                    expire_time=datetime.datetime.now(),
+                ),
             ),
         ],
     )

@@ -1289,7 +1295,7 @@ def test_repr_for_model_created_from_cahced_content(self):
             cached_content="test-cached-content"
         )
         result = repr(model)
-        self.assertIn("cached_content=cachedContent/test-cached-content", result)
+        self.assertIn("cached_content=cachedContents/test-cached-content", result)
         self.assertIn("model_name='models/gemini-1.0-pro-001'", result)
 
     def test_count_tokens_called_with_request_options(self):

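For reference, a sketch of the construction pattern the updated test case uses: instead of fetching the resource with CachedContent.get() (which would call the cache service), the test builds a CachedContent directly, supplying the name, model, and the three timestamp fields shown in the diff. The shared `now` variable below is only a local convenience, not part of the test.

import datetime

from google.generativeai import caching

# Build the CachedContent used by the parameterized test case directly,
# avoiding a service call. The timestamps are placeholders for test purposes.
now = datetime.datetime.now()
cached_content = caching.CachedContent(
    name="cachedContents/test-cached-content",
    model="models/gemini-1.0-pro-001",
    create_time=now,
    update_time=now,
    expire_time=now,
)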