Skip to content

Commit a711a5f

Browse files
onkarbhardwaj, mirianfsilva and jpwsutton
authored
New Release (IBM#60)
* Informative error messages in async generator and type change in GenAI exception (IBM#57)

* update greedy examples removing incompatible params (IBM#59)

* Fix/setup cfg (IBM#49)

* fix: including README in pypi deployment

* fix: adding build dependency and changing workflow to use it

Signed-off-by: James Sutton <[email protected]>
Co-authored-by: Mírian Silva <[email protected]>
Co-authored-by: James Sutton <[email protected]>
1 parent ebaa769 commit a711a5f

15 files changed

+23
-54
lines changed

.github/workflows/python-publish.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -24,7 +24,7 @@ jobs:
2424
python -m pip install -e ".[dev]"
2525
- name: Version and package
2626
run: |
27-
pip wheel -w dist -e . --no-deps
27+
python -m build
2828
- name: Publish package
2929
uses: pypa/gh-action-pypi-publish@f9ed8ba9ad06d20b1ebb6002ffb93050ed9a1951
3030
with:

examples/user/grid_search_params.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@
2121
# use the dictionary to define the search space, keep the keys as
2222
# the same from GenerateParams and use a list for the values to search
2323
my_space_params = {
24-
"decoding_method": ["sample", "greedy"],
24+
"decoding_method": ["sample"],
2525
"max_new_tokens": [10, 20],
2626
"min_new_tokens": [1, 2],
2727
"temperature": [0.7, 0.8, 0.9, 1.5],

examples/user/prompt_csv_random_rows.py

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,10 +28,6 @@
2828
max_new_tokens=15,
2929
min_new_tokens=1,
3030
stream=False,
31-
temperature=0.7,
32-
top_k=50,
33-
top_p=1,
34-
random_seed=2,
3531
)
3632

3733
creds = Credentials(api_key, api_endpoint)

examples/user/prompt_from_all_csv.py

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,10 +28,6 @@
2828
max_new_tokens=15,
2929
min_new_tokens=1,
3030
stream=False,
31-
temperature=0.7,
32-
top_k=50,
33-
top_p=1,
34-
random_seed=2,
3531
)
3632

3733
creds = Credentials(api_key, api_endpoint)

examples/user/prompt_from_all_dataframe.py

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -35,10 +35,6 @@
3535
max_new_tokens=15,
3636
min_new_tokens=1,
3737
stream=False,
38-
temperature=0.7,
39-
top_k=50,
40-
top_p=1,
41-
random_seed=2,
4238
)
4339

4440
creds = Credentials(api_key, api_endpoint)

examples/user/prompt_from_dataframe.py

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -34,10 +34,6 @@
3434
max_new_tokens=15,
3535
min_new_tokens=1,
3636
stream=False,
37-
temperature=0.7,
38-
top_k=50,
39-
top_p=1,
40-
random_seed=2,
4137
)
4238

4339
creds = Credentials(api_key, api_endpoint)

examples/user/self-reflection.py

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -23,9 +23,6 @@
2323
max_new_tokens=20,
2424
min_new_tokens=1,
2525
stream=False,
26-
temperature=0.7,
27-
top_k=50,
28-
top_p=1,
2926
)
3027

3128
creds = Credentials(api_key, api_endpoint)

pyproject.toml

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
[build-system]
2-
requires = ["setuptools>=67.6.0", "wheel>=0.40.0", "setuptools_scm[toml]>=7.1.0"]
2+
requires = ["setuptools>=68.0.0", "wheel>=0.40.0", "setuptools_scm[toml]>=7.1.0"]
33
build-backend = "setuptools.build_meta"
44

55
[project]
@@ -15,7 +15,7 @@ authors = [
1515
{name="James Sutton", email="[email protected]"},
1616
]
1717
description = "IBM Generative AI is a Python library built on IBM's large language model REST interface."
18-
readme = "README.MD"
18+
readme = "README.md"
1919
dependencies = [
2020
"urllib3<2", # https://github.com/psf/requests/issues/6432
2121
"requests>=2.31.0",
@@ -41,7 +41,8 @@ dev = [
4141
"pytest-asyncio>=0.21.0",
4242
"pytest-mock>=3.10.0",
4343
"setuptools-scm>=7.1.0",
44-
"pytest-httpx>=0.22.0"
44+
"pytest-httpx>=0.22.0",
45+
"build>=0.10.0"
4546
]
4647
langchain = [
4748
"langchain>=0.0.200"

src/genai/exceptions/genai_exception.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
import logging
22
from typing import Union
33

4+
from httpx import Response
45
from pydantic import ValidationError
5-
from requests import Response
66

77
from genai.schemas.responses import ErrorResponse
88

src/genai/services/async_generator.py

Lines changed: 9 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -97,6 +97,7 @@ async def _get_response_json(self, model, inputs, params):
9797
return response
9898

9999
async def _task(self, inputs, batch_num):
100+
response = None
100101
try:
101102
response = await self._get_response_json(self.model_id, inputs, self.params)
102103
logger.debug("Received response = {}".format(response))
@@ -105,7 +106,11 @@ async def _task(self, inputs, batch_num):
105106
response = self.message_type_(**response)
106107
logger.debug("Cast to Response = {}".format(response))
107108
except Exception as e:
108-
logger.error("Exception raised async_generate and casting : {}, inputs = {}".format(str(e), inputs))
109+
logger.error(
110+
"Exception raised async_generate and casting : {}, response = {}, inputs = {}".format(
111+
str(e), response, inputs
112+
)
113+
)
109114
self.queue_.put_nowait((batch_num, len(inputs), None))
110115
return
111116
try:
@@ -114,7 +119,9 @@ async def _task(self, inputs, batch_num):
114119
for result in response.results:
115120
self.callback(result)
116121
except Exception as e:
117-
logger.error("Exception raised in callback : {}, inputs = {}".format(str(e), inputs))
122+
logger.error(
123+
"Exception raised in callback : {}, response = {}, inputs = {}".format(str(e), response, inputs)
124+
)
118125

119126
async def _schedule_requests(self):
120127
tasks = []

tests/test_concurrent.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -309,7 +309,7 @@ async def test_concurrent_tokenize_not_inorder(self, mock_tokenize_json, tokeniz
309309
@pytest.mark.asyncio
310310
async def test_concurrent_tokenize_nones(self, mock_tokenize_json, tokenize_params, mocker):
311311
# test that if one request gets dropped, we get appropriate number of nones
312-
num_prompts = 14
312+
num_prompts = 18
313313
inputs = ["This is input number " + str(i) for i in range(num_prompts)]
314314
expected = SimpleResponse.tokenize_response_array_async(model=self.model, inputs=inputs)
315315
mock_tokenize_json.side_effect = expected

tests/test_generate_schema.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,7 @@ def setup_method(self):
1515

1616
# test all GenerateParams fields
1717
self.params = GenerateParams(
18-
decoding_method="greedy",
18+
decoding_method="sample",
1919
length_penalty=LengthPenalty(decay_factor=1.5, start_index=2),
2020
max_new_tokens=3,
2121
min_new_tokens=1,

tests/test_request_handler.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -14,7 +14,7 @@ def setup_method(self):
1414

1515
@pytest.fixture
1616
def params(seld):
17-
return GenerateParams(decoding_method="greedy", temperature=0.8).dict(by_alias=True, exclude_none=True)
17+
return GenerateParams(decoding_method="greedy").dict(by_alias=True, exclude_none=True)
1818

1919
def test_metadata_post(self, params):
2020
headers, json_data = RequestHandler._metadata(
@@ -30,7 +30,7 @@ def test_metadata_post(self, params):
3030
assert json_data == {
3131
"model_id": self.model,
3232
"inputs": self.inputs,
33-
"parameters": {"decoding_method": "greedy", "temperature": 0.8},
33+
"parameters": {"decoding_method": "greedy"},
3434
}
3535

3636
def test_metadata_get(self):

tests/test_service_interface.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,7 @@ def setup_method(self):
1717

1818
@pytest.fixture
1919
def params(seld):
20-
return GenerateParams(decoding_method="greedy", temperature=0.8, return_options=ReturnOptions(input_text=True))
20+
return GenerateParams(decoding_method="greedy", return_options=ReturnOptions(input_text=True))
2121

2222
@patch("genai.services.RequestHandler.patch")
2323
def test_tou(self, mocked_post_request):

tests/test_service_utils.py

Lines changed: 2 additions & 22 deletions
Original file line number | Diff line number | Diff line change
@@ -14,29 +14,9 @@
1414
@pytest.mark.unit
1515
class TestServiceUtils(unittest.TestCase):
1616
def test_sanitize_params_with_params(self):
17-
input_genParams = GenerateParams(
18-
decoding_method="greedy", temperature=1.0, return_options=ReturnOptions(input_text=True)
19-
)
17+
input_genParams = GenerateParams(decoding_method="greedy", return_options=ReturnOptions(input_text=True))
2018

21-
expected_genParams = {"decoding_method": "greedy", "temperature": 1.0, "return_options": {"input_text": True}}
19+
expected_genParams = {"decoding_method": "greedy", "return_options": {"input_text": True}}
2220

2321
sanitized_dict = ServiceInterface._sanitize_params(input_genParams)
2422
self.assertEqual(expected_genParams, sanitized_dict)
25-
26-
def test_sanitize_params_with_dict(self):
27-
# TODO : Confirm what approach we want to have for user passing dict
28-
29-
# Test with dictionary
30-
# input_dict = {
31-
# "fieldA": None,
32-
# "fieldB": None,
33-
# "returns_options": "something",
34-
# }
35-
36-
# expected_dict = {
37-
# "return": "something",
38-
# }
39-
40-
# sanitized_dict = sanitize_params(input_dict)
41-
# self.assertEqual(expected_dict, sanitized_dict)
42-
pass

0 commit comments

Comments (0)