Skip to content

Commit d18a84f

Browse files
committed
updated to support http streamable
1 parent 80de44c commit d18a84f

File tree

13 files changed

+691
-132
lines changed

13 files changed

+691
-132
lines changed

examples/context7_chuk_integration_demo.py

Lines changed: 561 additions & 0 deletions
Large diffs are not rendered by default.

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "chuk-tool-processor"
7-
version = "0.6.4"
7+
version = "0.6.5"
88
description = "Async-native framework for registering, discovering, and executing tools referenced in LLM responses"
99
readme = "README.md"
1010
requires-python = ">=3.11"
@@ -41,7 +41,7 @@ classifiers = [
4141
"Typing :: Typed",
4242
]
4343
dependencies = [
44-
"chuk-mcp>=0.5.1",
44+
"chuk-mcp>=0.5.2",
4545
"dotenv>=0.9.9",
4646
"pydantic>=2.11.3",
4747
"uuid>=1.30",

src/chuk_tool_processor/logging/helpers.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -58,15 +58,15 @@ async def log_context_span(
5858
prev = log_context.get_copy()
5959
log_context.update(span_ctx)
6060

61-
logger.info("Starting %s", operation)
61+
logger.debug("Starting %s", operation)
6262
try:
6363
yield
6464
if log_duration:
65-
logger.info(
65+
logger.debug(
6666
"Completed %s", operation, extra={"context": {"duration": time.time() - start}}
6767
)
6868
else:
69-
logger.info("Completed %s", operation)
69+
logger.debug("Completed %s", operation)
7070
except Exception as exc:
7171
logger.exception(
7272
"Error in %s: %s", operation, exc, extra={"context": {"duration": time.time() - start}}
@@ -97,10 +97,10 @@ async def request_logging(
9797
logger = get_logger("chuk_tool_processor.request")
9898
request_id = log_context.start_request(request_id)
9999
start = time.time()
100-
logger.info("Starting request %s", request_id)
100+
logger.debug("Starting request %s", request_id)
101101
try:
102102
yield request_id
103-
logger.info(
103+
logger.debug(
104104
"Completed request %s",
105105
request_id,
106106
extra={"context": {"duration": time.time() - start}},
@@ -184,4 +184,4 @@ async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
184184
if tool_result.error:
185185
logger.error("Tool %s failed: %s", tool_call.tool, tool_result.error, extra={"context": ctx})
186186
else:
187-
logger.info("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})
187+
logger.debug("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})

src/chuk_tool_processor/mcp/register_mcp_tools.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ async def register_mcp_tools(
9595
except Exception as exc:
9696
logger.error("Failed to register MCP tool '%s': %s", tool_name, exc)
9797

98-
logger.info("MCP registration complete - %d tool(s) available", len(registered))
98+
logger.debug("MCP registration complete - %d tool(s) available", len(registered))
9999
return registered
100100

101101

@@ -134,14 +134,14 @@ async def update_mcp_tools_stream_manager(
134134
if tool and hasattr(tool, 'set_stream_manager'):
135135
tool.set_stream_manager(new_stream_manager)
136136
updated_count += 1
137-
logger.debug(f"Updated StreamManager for tool '{namespace}:{tool_name}'")
137+
logger.debug("Updated StreamManager for tool '%s:%s'", namespace, tool_name)
138138
except Exception as e:
139-
logger.warning(f"Failed to update StreamManager for tool '{namespace}:{tool_name}': {e}")
139+
logger.warning("Failed to update StreamManager for tool '%s:%s': %s", namespace, tool_name, e)
140140

141141
action = "connected" if new_stream_manager else "disconnected"
142-
logger.info(f"StreamManager {action} for {updated_count} tools in namespace '{namespace}'")
142+
logger.debug("StreamManager %s for %d tools in namespace '%s'", action, updated_count, namespace)
143143

144144
except Exception as e:
145-
logger.error(f"Failed to update tools in namespace '{namespace}': {e}")
145+
logger.error("Failed to update tools in namespace '%s': %s", namespace, e)
146146

147147
return updated_count

src/chuk_tool_processor/mcp/setup_mcp_http_streamable.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -110,8 +110,8 @@ async def setup_mcp_http_streamable(
110110
max_retries=max_retries,
111111
)
112112

113-
logger.info(
114-
"MCP (HTTP Streamable) initialised - %s tool%s registered into namespace '%s'",
113+
logger.debug(
114+
"MCP (HTTP Streamable) initialised - %d tool%s registered into namespace '%s'",
115115
len(registered),
116116
"" if len(registered) == 1 else "s",
117117
namespace,

src/chuk_tool_processor/mcp/setup_mcp_sse.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -89,8 +89,8 @@ async def setup_mcp_sse( # noqa: C901 - long but just a config facade
8989
max_retries=max_retries,
9090
)
9191

92-
logger.info(
93-
"MCP (SSE) initialised - %s tool%s registered into namespace '%s'",
92+
logger.debug(
93+
"MCP (SSE) initialised - %d tool%s registered into namespace '%s'",
9494
len(registered),
9595
"" if len(registered) == 1 else "s",
9696
namespace,

src/chuk_tool_processor/mcp/setup_mcp_stdio.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,8 +73,8 @@ async def setup_mcp_stdio( # noqa: C901 - long but just a config facade
7373
max_retries=max_retries,
7474
)
7575

76-
logger.info(
77-
"MCP (stdio) initialised - %s tool%s registered into namespace '%s'",
76+
logger.debug(
77+
"MCP (stdio) initialised - %d tool%s registered into namespace '%s'",
7878
len(registered),
7979
"" if len(registered) == 1 else "s",
8080
namespace,

src/chuk_tool_processor/mcp/stream_manager.py

Lines changed: 30 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ async def create(
7373
)
7474
return inst
7575
except asyncio.TimeoutError:
76-
logger.error(f"StreamManager initialization timed out after {initialization_timeout}s")
76+
logger.error("StreamManager initialization timed out after %ss", initialization_timeout)
7777
raise RuntimeError(f"StreamManager initialization timed out after {initialization_timeout}s")
7878

7979
@classmethod
@@ -99,7 +99,7 @@ async def create_with_sse(
9999
)
100100
return inst
101101
except asyncio.TimeoutError:
102-
logger.error(f"SSE StreamManager initialization timed out after {initialization_timeout}s")
102+
logger.error("SSE StreamManager initialization timed out after %ss", initialization_timeout)
103103
raise RuntimeError(f"SSE StreamManager initialization timed out after {initialization_timeout}s")
104104

105105
@classmethod
@@ -125,7 +125,7 @@ async def create_with_http_streamable(
125125
)
126126
return inst
127127
except asyncio.TimeoutError:
128-
logger.error(f"HTTP Streamable StreamManager initialization timed out after {initialization_timeout}s")
128+
logger.error("HTTP Streamable StreamManager initialization timed out after %ss", initialization_timeout)
129129
raise RuntimeError(f"HTTP Streamable StreamManager initialization timed out after {initialization_timeout}s")
130130

131131
# ------------------------------------------------------------------ #
@@ -196,7 +196,7 @@ async def initialize(
196196
else:
197197
sse_url = "http://localhost:8000"
198198
api_key = None
199-
logger.warning(f"No URL configured for SSE transport, using default: {sse_url}")
199+
logger.warning("No URL configured for SSE transport, using default: %s", sse_url)
200200

201201
transport = SSETransport(
202202
sse_url,
@@ -215,7 +215,7 @@ async def initialize(
215215
http_url = "http://localhost:8000"
216216
api_key = None
217217
session_id = None
218-
logger.warning(f"No URL configured for HTTP Streamable transport, using default: {http_url}")
218+
logger.warning("No URL configured for HTTP Streamable transport, using default: %s", http_url)
219219

220220
transport = HTTPStreamableTransport(
221221
http_url,
@@ -252,13 +252,13 @@ async def initialize(
252252
"status": status,
253253
}
254254
)
255-
logger.info("Initialised %s - %d tool(s)", server_name, len(tools))
255+
logger.debug("Initialised %s - %d tool(s)", server_name, len(tools))
256256
except asyncio.TimeoutError:
257257
logger.error("Timeout initialising %s", server_name)
258258
except Exception as exc:
259259
logger.error("Error initialising %s: %s", server_name, exc)
260260

261-
logger.info(
261+
logger.debug(
262262
"StreamManager ready - %d server(s), %d tool(s)",
263263
len(self.transports),
264264
len(self.all_tools),
@@ -307,13 +307,13 @@ async def initialize_with_sse(
307307
self.server_info.append(
308308
{"id": idx, "name": name, "tools": len(tools), "status": status}
309309
)
310-
logger.info("Initialised SSE %s - %d tool(s)", name, len(tools))
310+
logger.debug("Initialised SSE %s - %d tool(s)", name, len(tools))
311311
except asyncio.TimeoutError:
312312
logger.error("Timeout initialising SSE %s", name)
313313
except Exception as exc:
314314
logger.error("Error initialising SSE %s: %s", name, exc)
315315

316-
logger.info(
316+
logger.debug(
317317
"StreamManager ready - %d SSE server(s), %d tool(s)",
318318
len(self.transports),
319319
len(self.all_tools),
@@ -364,13 +364,13 @@ async def initialize_with_http_streamable(
364364
self.server_info.append(
365365
{"id": idx, "name": name, "tools": len(tools), "status": status}
366366
)
367-
logger.info("Initialised HTTP Streamable %s - %d tool(s)", name, len(tools))
367+
logger.debug("Initialised HTTP Streamable %s - %d tool(s)", name, len(tools))
368368
except asyncio.TimeoutError:
369369
logger.error("Timeout initialising HTTP Streamable %s", name)
370370
except Exception as exc:
371371
logger.error("Error initialising HTTP Streamable %s: %s", name, exc)
372372

373-
logger.info(
373+
logger.debug(
374374
"StreamManager ready - %d HTTP Streamable server(s), %d tool(s)",
375375
len(self.transports),
376376
len(self.all_tools),
@@ -395,20 +395,20 @@ async def list_tools(self, server_name: str) -> List[Dict[str, Any]]:
395395
return []
396396

397397
if server_name not in self.transports:
398-
logger.error(f"Server '{server_name}' not found in transports")
398+
logger.error("Server '%s' not found in transports", server_name)
399399
return []
400400

401401
transport = self.transports[server_name]
402402

403403
try:
404404
tools = await asyncio.wait_for(transport.get_tools(), timeout=10.0)
405-
logger.debug(f"Found {len(tools)} tools for server {server_name}")
405+
logger.debug("Found %d tools for server %s", len(tools), server_name)
406406
return tools
407407
except asyncio.TimeoutError:
408-
logger.error(f"Timeout listing tools for server {server_name}")
408+
logger.error("Timeout listing tools for server %s", server_name)
409409
return []
410410
except Exception as e:
411-
logger.error(f"Error listing tools for server {server_name}: {e}")
411+
logger.error("Error listing tools for server %s: %s", server_name, e)
412412
return []
413413

414414
# ------------------------------------------------------------------ #
@@ -541,7 +541,7 @@ async def close(self) -> None:
541541
self._closed = True
542542
return
543543

544-
logger.debug(f"Closing {len(self.transports)} transports...")
544+
logger.debug("Closing %d transports...", len(self.transports))
545545

546546
try:
547547
# Use shield to protect the cleanup operation from cancellation
@@ -551,7 +551,7 @@ async def close(self) -> None:
551551
logger.debug("Close operation cancelled, performing synchronous cleanup")
552552
self._sync_cleanup()
553553
except Exception as e:
554-
logger.debug(f"Error during close: {e}")
554+
logger.debug("Error during close: %s", e)
555555
self._sync_cleanup()
556556
finally:
557557
self._closed = True
@@ -565,7 +565,7 @@ async def _do_close_all_transports(self) -> None:
565565
try:
566566
await self._concurrent_close(transport_items, close_results)
567567
except Exception as e:
568-
logger.debug(f"Concurrent close failed: {e}, falling back to sequential close")
568+
logger.debug("Concurrent close failed: %s, falling back to sequential close", e)
569569
# Strategy 2: Fall back to sequential close
570570
await self._sequential_close(transport_items, close_results)
571571

@@ -575,7 +575,7 @@ async def _do_close_all_transports(self) -> None:
575575
# Log summary
576576
if close_results:
577577
successful_closes = sum(1 for _, success, _ in close_results if success)
578-
logger.debug(f"Transport cleanup: {successful_closes}/{len(close_results)} closed successfully")
578+
logger.debug("Transport cleanup: %d/%d closed successfully", successful_closes, len(close_results))
579579

580580
async def _concurrent_close(self, transport_items: List[Tuple[str, MCPBaseTransport]], close_results: List) -> None:
581581
"""Try to close all transports concurrently."""
@@ -602,10 +602,10 @@ async def _concurrent_close(self, transport_items: List[Tuple[str, MCPBaseTransp
602602
for i, (name, _) in enumerate(close_tasks):
603603
result = results[i] if i < len(results) else None
604604
if isinstance(result, Exception):
605-
logger.debug(f"Transport {name} close failed: {result}")
605+
logger.debug("Transport %s close failed: %s", name, result)
606606
close_results.append((name, False, str(result)))
607607
else:
608-
logger.debug(f"Transport {name} closed successfully")
608+
logger.debug("Transport %s closed successfully", name)
609609
close_results.append((name, True, None))
610610

611611
except asyncio.TimeoutError:
@@ -632,16 +632,16 @@ async def _sequential_close(self, transport_items: List[Tuple[str, MCPBaseTransp
632632
self._close_single_transport(name, transport),
633633
timeout=0.5 # Short timeout per transport
634634
)
635-
logger.debug(f"Closed transport: {name}")
635+
logger.debug("Closed transport: %s", name)
636636
close_results.append((name, True, None))
637637
except asyncio.TimeoutError:
638-
logger.debug(f"Transport {name} close timed out (normal during shutdown)")
638+
logger.debug("Transport %s close timed out (normal during shutdown)", name)
639639
close_results.append((name, False, "timeout"))
640640
except asyncio.CancelledError:
641-
logger.debug(f"Transport {name} close cancelled during event loop shutdown")
641+
logger.debug("Transport %s close cancelled during event loop shutdown", name)
642642
close_results.append((name, False, "cancelled"))
643643
except Exception as e:
644-
logger.debug(f"Error closing transport {name}: {e}")
644+
logger.debug("Error closing transport %s: %s", name, e)
645645
close_results.append((name, False, str(e)))
646646

647647
async def _close_single_transport(self, name: str, transport: MCPBaseTransport) -> None:
@@ -650,19 +650,19 @@ async def _close_single_transport(self, name: str, transport: MCPBaseTransport)
650650
if hasattr(transport, 'close') and callable(transport.close):
651651
await transport.close()
652652
else:
653-
logger.debug(f"Transport {name} has no close method")
653+
logger.debug("Transport %s has no close method", name)
654654
except Exception as e:
655-
logger.debug(f"Error closing transport {name}: {e}")
655+
logger.debug("Error closing transport %s: %s", name, e)
656656
raise
657657

658658
def _sync_cleanup(self) -> None:
659659
"""Synchronous cleanup for use when async cleanup fails."""
660660
try:
661661
transport_count = len(self.transports)
662662
self._cleanup_state()
663-
logger.debug(f"Synchronous cleanup completed for {transport_count} transports")
663+
logger.debug("Synchronous cleanup completed for %d transports", transport_count)
664664
except Exception as e:
665-
logger.debug(f"Error during synchronous cleanup: {e}")
665+
logger.debug("Error during synchronous cleanup: %s", e)
666666

667667
def _cleanup_state(self) -> None:
668668
"""Clean up internal state synchronously."""
@@ -673,7 +673,7 @@ def _cleanup_state(self) -> None:
673673
self.all_tools.clear()
674674
self.server_names.clear()
675675
except Exception as e:
676-
logger.debug(f"Error during state cleanup: {e}")
676+
logger.debug("Error during state cleanup: %s", e)
677677

678678
# ------------------------------------------------------------------ #
679679
# backwards-compat: streams helper #

0 commit comments

Comments (0)