Skip to content

Commit 9db2ae6

Browse files
Copilot authored and notfolder committed
Fix lint errors in first 4 client files - achieve zero errors in github_client.py, lm_client.py, lmstudio_client.py, and mcp_tool_client.py
Co-authored-by: notfolder <20558197+notfolder@users.noreply.github.com>
1 parent 3c7dc1a commit 9db2ae6

4 files changed

Lines changed: 67 additions & 59 deletions

File tree

clients/github_client.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@ def __init__(self, token: str | None = None, api_url: str | None = None) -> None
2929
ValueError: トークンが設定されていない場合
3030
3131
"""
32-
# トークンの設定(引数または環境変数から取得)
3332
self.token = token or os.environ.get("GITHUB_PERSONAL_ACCESS_TOKEN")
3433

3534
# APIのベースURLの設定(デフォルト: https://api.github.com)
@@ -290,7 +289,10 @@ def get_reviews_with_comments(
290289

291290
return reviews
292291

293-
def remove_url_fields(self, obj: Any) -> Any:
292+
def remove_url_fields(
293+
self,
294+
obj: object,
295+
) -> object:
294296
"""辞書またはリストから再帰的にURLのみを含むフィールドを削除する.
295297
296298
APIレスポンスから不要なURLフィールドを削除し、
@@ -304,14 +306,13 @@ def remove_url_fields(self, obj: Any) -> Any:
304306
305307
"""
306308

307-
def is_url(val: Any) -> bool:
309+
def is_url(val: object) -> bool:
308310
"""値がURL文字列かどうかを判定する."""
309311
return isinstance(val, str) and (
310312
val.startswith(("http://", "https://"))
311313
)
312314

313315
if isinstance(obj, dict):
314-
# 辞書の場合:URLでない値のみを残して再帰的に処理
315316
return {
316317
k: self.remove_url_fields(v)
317318
for k, v in obj.items()

clients/lm_client.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,16 @@
33
from abc import ABC, abstractmethod
44
from typing import Any
55

6-
from .llm_base import LLMClient
76
from .lmstudio_client import LMStudioClient
87
from .ollama_client import OllamaClient
98
from .openai_client import OpenAIClient
109

10+
# Conditional import for mock client to avoid circular imports in testing
11+
try:
12+
from tests.mocks.mock_llm_client import get_mock_llm_client
13+
except ImportError:
14+
get_mock_llm_client = None
15+
1116

1217
class LLMClient(ABC):
1318
@abstractmethod
@@ -19,7 +24,7 @@ def send_user_message(self, message: str) -> None:
1924
pass
2025

2126
@abstractmethod
22-
def send_function_result(self, name: str, result: Any) -> None:
27+
def send_function_result(self, name: str, result: object) -> None:
2328
pass
2429

2530
@abstractmethod
@@ -39,19 +44,13 @@ def get_llm_client(
3944
raise ValueError(msg)
4045
return LMStudioClient(config["llm"]["lmstudio"])
4146
if prov == "ollama":
42-
# TODO: functions support
4347
return OllamaClient(config["llm"]["ollama"])
4448
if prov == "openai":
4549
return OpenAIClient(config["llm"]["openai"], functions, tools)
4650
if prov == "mock":
47-
# Import here to avoid circular import during testing
48-
try:
49-
from tests.mocks.mock_llm_client import get_mock_llm_client
50-
51-
return get_mock_llm_client(config, functions, tools)
52-
except ImportError:
51+
if get_mock_llm_client is None:
5352
msg = "Mock LLM client not available - this should only be used in tests"
5453
raise ValueError(msg)
55-
else:
56-
msg = f"Unknown llm.provider: {prov}"
57-
raise ValueError(msg)
54+
return get_mock_llm_client(config, functions, tools)
55+
msg = f"Unknown llm.provider: {prov}"
56+
raise ValueError(msg)

clients/lmstudio_client.py

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,26 +2,26 @@
22

33
from typing import Any
44

5+
import lmstudio as lms
6+
from ollama import chat
7+
58
from .llm_base import LLMClient
69

710

811
class LMStudioClient(LLMClient):
912
def __init__(self, config: dict[str, Any]) -> None:
10-
import lmstudio as lms
11-
1213
lms.configure_default_client(config.get("base_url", "localhost:1234"))
1314
self.model = lms.llm(config.get("model"))
1415
self.chat = lms.Chat()
1516
self.last_response = None
1617

1718
def send_system_prompt(self, prompt: str) -> None:
1819
self.chat.add_system_prompt(prompt)
19-
# self.chat = self.model.Chat(prompt)
2020

2121
def send_user_message(self, message: str) -> None:
2222
self.chat.add_user_message(message)
2323

24-
def send_function_result(self, name: str, result: Any) -> None:
24+
def send_function_result(self, name: str, result: object) -> None:
2525
msg = "LMStudio does not support function calls. Use OpenAI compatible call instead."
2626
raise NotImplementedError(
2727
msg,
@@ -30,14 +30,11 @@ def send_function_result(self, name: str, result: Any) -> None:
3030
def get_response(self) -> str:
3131
result = self.model.respond(self.chat)
3232
self.chat.add_assistant_response(result)
33-
# self.chat.add_assistant_message(result)
3433
return str(result)
3534

3635

3736
class OllamaClient(LLMClient):
3837
def __init__(self, config: dict[str, Any]) -> None:
39-
from ollama import chat
40-
4138
self.chat = chat
4239
self.model = config["model"]
4340
self.max_token = config.get("max_token", 32768)

clients/mcp_tool_client.py

Lines changed: 47 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,10 @@
55
import json
66
import os
77
import threading
8-
from typing import Any
8+
from typing import TYPE_CHECKING, Any
9+
10+
if TYPE_CHECKING:
11+
from collections.abc import Awaitable, Callable
912

1013
from mcp import StdioServerParameters
1114
from mcp.client.session import ClientSession
@@ -27,7 +30,7 @@ def __init__(self, server_config: dict[str, Any], *, function_calling: bool = Tr
2730
self._system_prompt = None
2831
self.function_calling = function_calling
2932

30-
def call_tool(self, tool: str, args: dict[str, Any]) -> Any:
33+
def call_tool(self, tool: str, args: dict[str, Any]) -> object:
3134
with self.lock:
3235
return self._call_tool_sync(tool, args)
3336

@@ -46,48 +49,50 @@ def system_prompt(self) -> str:
4649
def close(self) -> None:
4750
pass # クライアントの状態管理が不要になったため何もしない
4851

49-
def _run_async(self, coro):
52+
def _run_async(self, coro: Awaitable[object]) -> object:
5053
loop = asyncio.new_event_loop()
5154
try:
5255
return loop.run_until_complete(coro)
5356
finally:
5457
loop.close()
5558

56-
def _run_with_session(self, coro_fn):
57-
async def wrapper():
59+
def _run_with_session(self, coro_fn: Callable[[ClientSession], Awaitable[object]]) -> object:
60+
async def wrapper() -> object:
5861
cmd = self.server_config["command"][0]
5962
args = self.server_config["command"][1:]
6063
env = self.server_config.get("env", {})
6164
merged_env = dict(os.environ)
6265
merged_env.update(env)
6366
server_params = StdioServerParameters(command=cmd, args=args, env=merged_env)
64-
async with stdio_client(server_params) as (read_stream, write_stream):
65-
async with ClientSession(read_stream, write_stream) as session:
66-
await session.initialize()
67-
# notifications/initialized送信
68-
notification = ClientNotification(
69-
InitializedNotification(method="notifications/initialized"),
70-
)
71-
await session.send_notification(notification)
72-
return await coro_fn(session)
67+
async with (
68+
stdio_client(server_params) as (read_stream, write_stream),
69+
ClientSession(read_stream, write_stream) as session,
70+
):
71+
await session.initialize()
72+
# notifications/initialized送信
73+
notification = ClientNotification(
74+
InitializedNotification(method="notifications/initialized"),
75+
)
76+
await session.send_notification(notification)
77+
return await coro_fn(session)
7378

7479
return self._run_async(wrapper())
7580

7681
def _git_blob_sha1_from_str(self, s: str, encoding: str = "utf-8") -> str:
77-
r"""Git blob SHA-1 を文字列から計算する。
82+
r"""Git blob SHA-1 を文字列から計算する.
83+
7884
- s: テキスト文字列(例:"Hello\n")
7985
- encoding: バイト化に使用するエンコーディング.
8086
"""
8187
data = s.encode(encoding)
8288
header = f"blob {len(data)}\0".encode()
8389
full = header + data
84-
return hashlib.sha1(full).hexdigest()
90+
return hashlib.sha1(full, usedforsecurity=False).hexdigest()
8591

86-
def _call_tool_sync(self, tool: str, args: dict[str, Any]) -> Any:
87-
# tool_name = tool.split('_', 1)[1]
92+
def _call_tool_sync(self, tool: str, args: dict[str, Any]) -> object:
8893
tool_name = tool
8994

90-
async def coro_fn(session):
95+
async def coro_fn(session: ClientSession) -> object:
9196
return await session.call_tool(tool_name, args)
9297

9398
result = self._run_with_session(coro_fn)
@@ -98,7 +103,7 @@ async def coro_fn(session):
98103
try:
99104
obj = json.loads(content.text)
100105
results.append(obj)
101-
except Exception:
106+
except (json.JSONDecodeError, ValueError):
102107
results.append(content.text)
103108
elif isinstance(content, EmbeddedResource):
104109
resource = content.resource
@@ -113,13 +118,13 @@ async def coro_fn(session):
113118

114119
return results[0] if len(results) == 1 else results
115120

116-
def _list_tools_sync(self):
117-
async def coro_fn(session):
121+
def _list_tools_sync(self) -> object:
122+
async def coro_fn(session: ClientSession) -> object:
118123
return await session.list_tools()
119124

120125
return self._run_with_session(coro_fn)
121126

122-
def _get_tools_sync(self):
127+
def _get_tools_sync(self) -> tuple[str, list[Tool]]:
123128
mcp_name = self.server_config.get("mcp_server_name", "")
124129
tools = self.list_tools().tools
125130
return mcp_name, tools
@@ -149,25 +154,31 @@ def get_function_calling_functions(self) -> list[dict[str, Any]]:
149154
for tool in tools
150155
]
151156

152-
def _get_system_prompt_sync(self):
157+
def _get_system_prompt_sync(self) -> str:
153158
mcp_name, tools = self._get_tools_sync()
154159
prompt_lines = [f"### {mcp_name} mcp tools"]
155-
for tool in tools:
156-
if isinstance(tool, Tool):
157-
tool = {
158-
"name": tool.name,
159-
"description": tool.description,
160-
"inputSchema": tool.inputSchema if isinstance(tool.inputSchema, dict) else {},
161-
"required": tool.inputSchema.get("required", []),
160+
for tool_obj in tools:
161+
if isinstance(tool_obj, Tool):
162+
tool_dict = {
163+
"name": tool_obj.name,
164+
"description": tool_obj.description,
165+
"inputSchema": (
166+
tool_obj.inputSchema
167+
if isinstance(tool_obj.inputSchema, dict)
168+
else {}
169+
),
170+
"required": tool_obj.inputSchema.get("required", []),
162171
}
163-
if not isinstance(tool, dict):
172+
else:
173+
tool_dict = tool_obj
174+
if not isinstance(tool_dict, dict):
164175
continue
165-
tool_name = f"{mcp_name}_{tool.get('name', '')}"
166-
desc = tool.get("description", "") or ""
176+
tool_name = f"{mcp_name}_{tool_dict.get('name', '')}"
177+
desc = tool_dict.get("description", "") or ""
167178
desc = desc.replace("\n", " ").replace("\r", " ").strip()
168-
input_schema = tool.get("inputSchema", {})
179+
input_schema = tool_dict.get("inputSchema", {})
169180
params = input_schema.get("properties", {}) if isinstance(input_schema, dict) else {}
170-
required = tool.get("required", []) or []
181+
required = tool_dict.get("required", []) or []
171182
param_str = (
172183
"{ "
173184
+ ", ".join(

0 commit comments

Comments (0)