
Commit 785c1db

fix(client): include openllm.client into main module [skip ci]
Signed-off-by: Aaron <[email protected]>
1 parent d0aaf80 · commit 785c1db
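
In practice, this commit folds the standalone `openllm_client` package into the main package as `openllm.client`. A minimal before/after sketch of what downstream imports look like after the move; the endpoint URL and timeout value are illustrative placeholders, not values taken from the diff:

```python
# Before this commit, the client lived in a separate top-level package:
#   from openllm_client import HTTPClient

# After this commit, the client ships inside the main module:
from openllm.client import HTTPClient

# Hypothetical usage; "http://localhost:3000" and timeout=30 are placeholder values.
client = HTTPClient("http://localhost:3000", timeout=30)
```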

11 files changed: +13, -56 lines


.github/workflows/build.yml

Lines changed: 0 additions & 1 deletion
@@ -13,7 +13,6 @@ on:
       - ".github/workflows/build.yaml"
       - "src/openllm/bundle/oci/Dockerfile"
       - "src/openllm/**"
-      - "src/openllm_client/**"
 env:
   LINES: 120
   COLUMNS: 120

DEVELOPMENT.md

Lines changed: 0 additions & 1 deletion
@@ -74,7 +74,6 @@ openllm/
 ├── examples            # Usage demonstration scripts
 ├── src
 │   ├── openllm         # openllm core
-│   ├── openllm_client  # openllm python client
 │   └── openllm-node    # openllm nodejs library
 ├── tests               # Automated Tests
 ├── tools               # Utilities Script

pyproject.toml

Lines changed: 3 additions & 4 deletions
@@ -284,7 +284,7 @@ avoid-escape = false
 "src/openllm/models/**" = ["I001", "E", "D", "F"]
 "src/openllm/utils/__init__.py" = ["I001"]
 "src/openllm/utils/import_utils.py" = ["PLW0603"]
-"src/openllm_client/runtimes/*" = ["D107"]
+"src/openllm/client/runtimes/*" = ["D107"]
 "tests/**/*" = [
   "S101",
   "TID252",
@@ -395,7 +395,7 @@ exclude = [
   "examples",
   "tests",
 ]
-include = ["src/openllm", "src/openllm_client"]
+include = ["src/openllm"]
 pythonVersion = "3.12"
 reportMissingImports = "warning"
 reportMissingTypeStubs = false
@@ -411,7 +411,7 @@ typeCheckingMode = "strict"
 # TODO: Enable model for strict type checking
 exclude = ["src/openllm/playground/", "src/openllm/utils/dummy_*.py", "src/openllm/models"]
 local_partial_types = true
-modules = ["openllm", "openllm_client"]
+modules = ["openllm"]
 mypy_path = "typings"
 pretty = true
 python_version = "3.11"
@@ -500,7 +500,6 @@ include = [
   "src/openllm/_quantisation.py",
   "src/openllm/_generation.py",
   "src/openllm/_strategies.py",
-  "src/openllm/client.py",
   "src/openllm/exceptions.py",
   "src/openllm/testing.py",
 ]

src/openllm/cli/entrypoint.py

Lines changed: 5 additions & 9 deletions
@@ -69,11 +69,7 @@
   start_command_factory,
   workers_per_resource_option,
 )
-from .. import (
-  bundle,
-  client as openllm_client,
-  serialisation,
-)
+from .. import bundle, serialisation
 from ..exceptions import OpenLLMException
 from ..models.auto import (
   CONFIG_MAPPING,
@@ -109,10 +105,10 @@
 
 if t.TYPE_CHECKING:
   import torch
-  from openllm_client.runtimes.base import BaseClient
 
   from bentoml._internal.bento import BentoStore
   from bentoml._internal.container import DefaultBuilder
+  from openllm.client import BaseClient
 
   from .._schema import EmbeddingsOutput
   from .._types import DictStrAny, LiteralRuntime, P
@@ -713,7 +709,7 @@ def instruct_command(endpoint: str, timeout: int, agent: t.LiteralString, output
     --text "¡Este es un API muy agradable!"
   ```
   """
-  client = openllm_client.HTTPClient(endpoint, timeout=timeout)
+  client = openllm.client.HTTPClient(endpoint, timeout=timeout)
 
   try:
     client.call("metadata")
@@ -745,7 +741,7 @@ def embed_command(ctx: click.Context, text: tuple[str, ...], endpoint: str, time
     $ openllm embed --endpoint http://12.323.2.1:3000 "What is the meaning of life?" "How many stars are there in the sky?"
   ```
   """
-  client = t.cast("BaseClient[t.Any]", openllm_client.HTTPClient(endpoint, timeout=timeout) if server_type == "http" else openllm_client.GrpcClient(endpoint, timeout=timeout))
+  client = t.cast("BaseClient[t.Any]", openllm.client.HTTPClient(endpoint, timeout=timeout) if server_type == "http" else openllm.client.GrpcClient(endpoint, timeout=timeout))
   try:
     gen_embed = client.embed(text)
   except ValueError:
@@ -778,7 +774,7 @@ def query_command(ctx: click.Context, /, prompt: str, endpoint: str, timeout: in
   """
   _memoized = {k: orjson.loads(v[0]) for k, v in _memoized.items() if v}
   if server_type == "grpc": endpoint = re.sub(r"http://", "", endpoint)
-  client = t.cast("BaseClient[t.Any]", openllm_client.HTTPClient(endpoint, timeout=timeout) if server_type == "http" else openllm_client.GrpcClient(endpoint, timeout=timeout))
+  client = t.cast("BaseClient[t.Any]", openllm.client.HTTPClient(endpoint, timeout=timeout) if server_type == "http" else openllm.client.GrpcClient(endpoint, timeout=timeout))
   input_fg, generated_fg = "magenta", "cyan"
   if output != "porcelain":
     termui.echo("==Input==\n", fg="white")
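
For context, the CLI's client construction after this change follows the pattern sketched below. This is an illustrative helper, not a function in the repository; `endpoint`, `timeout`, and `server_type` mirror the variables in the diff above, and the HTTP/gRPC dispatch mirrors the `t.cast(...)` expressions in `embed_command` and `query_command`:

```python
from __future__ import annotations

import typing as t

import openllm  # the client now resolves through openllm.client

if t.TYPE_CHECKING:
  from openllm.client import BaseClient


def make_client(endpoint: str, timeout: int, server_type: str) -> BaseClient[t.Any]:
  """Hypothetical helper mirroring the client dispatch in the CLI commands above."""
  if server_type == "http":
    return t.cast("BaseClient[t.Any]", openllm.client.HTTPClient(endpoint, timeout=timeout))
  # gRPC endpoints are passed without an "http://" scheme (query_command strips it).
  return t.cast("BaseClient[t.Any]", openllm.client.GrpcClient(endpoint, timeout=timeout))
```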

src/openllm/client.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

src/openllm_client/__init__.py renamed to src/openllm/client/__init__.py

Lines changed: 4 additions & 4 deletions
@@ -6,11 +6,11 @@
 """
 from __future__ import annotations
 
-from .runtimes.grpc import (
+from .runtimes import (
   AsyncGrpcClient as AsyncGrpcClient,
-  GrpcClient as GrpcClient,
-)
-from .runtimes.http import (
   AsyncHTTPClient as AsyncHTTPClient,
+  BaseAsyncClient as BaseAsyncClient,
+  BaseClient as BaseClient,
+  GrpcClient as GrpcClient,
   HTTPClient as HTTPClient,
 )
Two additional files renamed without content changes.
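
A note on the relocated `__init__.py`: the `Name as Name` spelling is the explicit re-export form that strict type checkers (mypy with implicit re-exports disabled, pyright in strict mode) recognise as public API, so every client flavour stays importable from the single `openllm.client` namespace. A hedged usage sketch; the imported names are exactly those re-exported above, while the endpoint and timeout values are placeholders:

```python
# All client variants are importable from one consolidated path.
from openllm.client import (
  AsyncGrpcClient,
  AsyncHTTPClient,
  GrpcClient,
  HTTPClient,
)

# Illustrative only: endpoints and timeouts are placeholder values.
http_client = HTTPClient("http://localhost:3000", timeout=30)
grpc_client = GrpcClient("localhost:3000", timeout=30)

# AsyncHTTPClient / AsyncGrpcClient are the async variants re-exported alongside them.
```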

src/openllm_client/runtimes/grpc.py renamed to src/openllm/client/runtimes/grpc.py

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
-
 from __future__ import annotations
 import asyncio
 import logging

src/openllm_client/runtimes/http.py renamed to src/openllm/client/runtimes/http.py

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
-
 from __future__ import annotations
 import logging
 import typing as t
