Skip to content

clean up log.error #1109

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits
Jul 9, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ This document contains critical information about working with this codebase. Fo
- Public APIs must have docstrings
- Functions must be focused and small
- Follow existing patterns exactly
- Line length: 88 chars maximum
- Line length: 120 chars maximum

3. Testing Requirements
- Framework: `uv run --frozen pytest`
Expand Down Expand Up @@ -116,3 +116,15 @@ This document contains critical information about working with this codebase. Fo
- Follow existing patterns
- Document public APIs
- Test thoroughly

## Exception Handling

- **Always use `logger.exception()` instead of `logger.error()` when catching exceptions** — it automatically records the exception traceback
- Don't repeat the exception in the message: write `logger.exception("Failed")`, not `logger.exception(f"Failed: {e}")`, since the exception details are logged automatically
- **Catch specific exceptions** where possible:
- File ops: `except (OSError, PermissionError):`
- JSON: `except json.JSONDecodeError:`
- Network: `except (ConnectionError, TimeoutError):`
- **Only catch `Exception` for**:
- Top-level handlers that must not crash
- Cleanup blocks (log at debug level)
5 changes: 2 additions & 3 deletions examples/servers/simple-auth/mcp_simple_auth/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,9 +160,8 @@ def main(port: int, auth_server: str, transport: Literal["sse", "streamable-http
mcp_server.run(transport=transport)
logger.info("Server stopped")
return 0
except Exception as e:
logger.error(f"Server error: {e}")
logger.exception("Exception details:")
except Exception:
logger.exception("Server error")
return 1


Expand Down
10 changes: 4 additions & 6 deletions src/mcp/cli/claude.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,11 +75,10 @@ def update_claude_config(
if not config_file.exists():
try:
config_file.write_text("{}")
except Exception as e:
logger.error(
except Exception:
logger.exception(
"Failed to create Claude config file",
extra={
"error": str(e),
"config_file": str(config_file),
},
)
Expand Down Expand Up @@ -139,11 +138,10 @@ def update_claude_config(
extra={"config_file": str(config_file)},
)
return True
except Exception as e:
logger.error(
except Exception:
logger.exception(
"Failed to update Claude config",
extra={
"error": str(e),
"config_file": str(config_file),
},
)
Expand Down
11 changes: 5 additions & 6 deletions src/mcp/cli/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,12 +349,11 @@ def run(

server.run(**kwargs)

except Exception as e:
logger.error(
f"Failed to run server: {e}",
except Exception:
logger.exception(
"Failed to run server",
extra={
"file": str(file),
"error": str(e),
},
)
sys.exit(1)
Expand Down Expand Up @@ -464,8 +463,8 @@ def install(
if dotenv:
try:
env_dict |= {k: v for k, v in dotenv.dotenv_values(env_file).items() if v is not None}
except Exception as e:
logger.error(f"Failed to load .env file: {e}")
except (OSError, ValueError):
logger.exception("Failed to load .env file")
sys.exit(1)
else:
logger.error("python-dotenv is not installed. Cannot load .env file.")
Expand Down
8 changes: 4 additions & 4 deletions src/mcp/client/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,8 +464,8 @@ async def _handle_refresh_response(self, response: httpx.Response) -> bool:
await self.context.storage.set_tokens(token_response)

return True
except ValidationError as e:
logger.error(f"Invalid refresh response: {e}")
except ValidationError:
logger.exception("Invalid refresh response")
self.context.clear_tokens()
return False

Expand Down Expand Up @@ -522,8 +522,8 @@ async def async_auth_flow(self, request: httpx.Request) -> AsyncGenerator[httpx.
token_request = await self._exchange_token(auth_code, code_verifier)
token_response = yield token_request
await self._handle_token_response(token_response)
except Exception as e:
logger.error(f"OAuth flow error: {e}")
except Exception:
logger.exception("OAuth flow error")
raise

# Add authorization header and make request
Expand Down
8 changes: 4 additions & 4 deletions src/mcp/client/sse.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ async def sse_reader(
)
logger.debug(f"Received server message: {message}")
except Exception as exc:
logger.error(f"Error parsing server message: {exc}")
logger.exception("Error parsing server message")
await read_stream_writer.send(exc)
continue

Expand All @@ -106,7 +106,7 @@ async def sse_reader(
case _:
logger.warning(f"Unknown SSE event: {sse.event}")
except Exception as exc:
logger.error(f"Error in sse_reader: {exc}")
logger.exception("Error in sse_reader")
await read_stream_writer.send(exc)
finally:
await read_stream_writer.aclose()
Expand All @@ -126,8 +126,8 @@ async def post_writer(endpoint_url: str):
)
response.raise_for_status()
logger.debug(f"Client message sent successfully: {response.status_code}")
except Exception as exc:
logger.error(f"Error in post_writer: {exc}")
except Exception:
logger.exception("Error in post_writer")
finally:
await write_stream.aclose()

Expand Down
6 changes: 3 additions & 3 deletions src/mcp/client/streamable_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ async def _handle_json_response(
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
except Exception as exc:
logger.error(f"Error parsing JSON response: {exc}")
logger.exception("Error parsing JSON response")
await read_stream_writer.send(exc)

async def _handle_sse_response(
Expand Down Expand Up @@ -410,8 +410,8 @@ async def handle_request_async():
else:
await handle_request_async()

except Exception as exc:
logger.error(f"Error in post_writer: {exc}")
except Exception:
logger.exception("Error in post_writer")
finally:
await read_stream_writer.aclose()
await write_stream.aclose()
Expand Down
8 changes: 4 additions & 4 deletions src/mcp/os/posix/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,9 @@ async def terminate_posix_process_tree(process: Process, timeout_seconds: float
process.terminate()
with anyio.fail_after(timeout_seconds):
await process.wait()
except Exception as term_error:
logger.warning(f"Process termination failed for PID {pid}: {term_error}, attempting force kill")
except Exception:
logger.warning(f"Process termination failed for PID {pid}, attempting force kill")
try:
process.kill()
except Exception as kill_error:
logger.error(f"Failed to kill process {pid}: {kill_error}")
except Exception:
logger.exception(f"Failed to kill process {pid}")
4 changes: 2 additions & 2 deletions src/mcp/server/fastmcp/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ async def read_resource(self, uri: AnyUrl | str) -> Iterable[ReadResourceContent
content = await resource.read()
return [ReadResourceContents(content=content, mime_type=resource.mime_type)]
except Exception as e:
logger.error(f"Error reading resource {uri}: {e}")
logger.exception(f"Error reading resource {uri}")
raise ResourceError(str(e))

def add_tool(
Expand Down Expand Up @@ -961,7 +961,7 @@ async def get_prompt(self, name: str, arguments: dict[str, Any] | None = None) -

return GetPromptResult(messages=pydantic_core.to_jsonable_python(messages))
except Exception as e:
logger.error(f"Error getting prompt {name}: {e}")
logger.exception(f"Error getting prompt {name}")
raise ValueError(str(e))


Expand Down
6 changes: 3 additions & 3 deletions src/mcp/server/sse.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,8 +105,8 @@ def __init__(self, endpoint: str, security_settings: TransportSecuritySettings |
# Validate that endpoint is a relative path and not a full URL
if "://" in endpoint or endpoint.startswith("//") or "?" in endpoint or "#" in endpoint:
raise ValueError(
f"Given endpoint: {endpoint} is not a relative path (e.g., '/messages/'), \
expecting a relative path(e.g., '/messages/')."
f"Given endpoint: {endpoint} is not a relative path (e.g., '/messages/'), "
"expecting a relative path (e.g., '/messages/')."
)

# Ensure endpoint starts with a forward slash
Expand Down Expand Up @@ -234,7 +234,7 @@ async def handle_post_message(self, scope: Scope, receive: Receive, send: Send)
message = types.JSONRPCMessage.model_validate_json(body)
logger.debug(f"Validated client message: {message}")
except ValidationError as err:
logger.error(f"Failed to parse message: {err}")
logger.exception("Failed to parse message")
response = Response("Could not parse message", status_code=400)
await response(scope, receive, send)
await writer.send(err)
Expand Down
54 changes: 25 additions & 29 deletions src/mcp/server/streamable_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,8 +248,9 @@ async def _clean_up_memory_streams(self, request_id: RequestId) -> None:
# Close the request stream
await self._request_streams[request_id][0].aclose()
await self._request_streams[request_id][1].aclose()
except Exception as e:
logger.debug(f"Error closing memory streams: {e}")
except Exception:
# During cleanup, we catch all exceptions since streams might be in various states
logger.debug("Error closing memory streams - may already be closed")
finally:
# Remove the request stream from the mapping
self._request_streams.pop(request_id, None)
Expand Down Expand Up @@ -421,10 +422,10 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re
HTTPStatus.INTERNAL_SERVER_ERROR,
)
await response(scope, receive, send)
except Exception as e:
logger.exception(f"Error processing JSON response: {e}")
except Exception:
logger.exception("Error processing JSON response")
response = self._create_error_response(
f"Error processing request: {str(e)}",
"Error processing request",
HTTPStatus.INTERNAL_SERVER_ERROR,
INTERNAL_ERROR,
)
Expand All @@ -451,8 +452,8 @@ async def sse_writer():
JSONRPCResponse | JSONRPCError,
):
break
except Exception as e:
logger.exception(f"Error in SSE writer: {e}")
except Exception:
logger.exception("Error in SSE writer")
finally:
logger.debug("Closing SSE writer")
await self._clean_up_memory_streams(request_id)
Expand Down Expand Up @@ -569,8 +570,8 @@ async def standalone_sse_writer():
# Send the message via SSE
event_data = self._create_event_data(event_message)
await sse_stream_writer.send(event_data)
except Exception as e:
logger.exception(f"Error in standalone SSE writer: {e}")
except Exception:
logger.exception("Error in standalone SSE writer")
finally:
logger.debug("Closing standalone SSE writer")
await self._clean_up_memory_streams(GET_STREAM_KEY)
Expand All @@ -585,8 +586,8 @@ async def standalone_sse_writer():
try:
# This will send headers immediately and establish the SSE connection
await response(request.scope, request.receive, send)
except Exception as e:
logger.exception(f"Error in standalone SSE response: {e}")
except Exception:
logger.exception("Error in standalone SSE response")
await sse_stream_writer.aclose()
await sse_stream_reader.aclose()
await self._clean_up_memory_streams(GET_STREAM_KEY)
Expand Down Expand Up @@ -628,10 +629,7 @@ async def _terminate_session(self) -> None:

# Close all request streams asynchronously
for key in request_stream_keys:
try:
await self._clean_up_memory_streams(key)
except Exception as e:
logger.debug(f"Error closing stream {key} during termination: {e}")
await self._clean_up_memory_streams(key)

# Clear the request streams dictionary immediately
self._request_streams.clear()
Expand All @@ -645,6 +643,7 @@ async def _terminate_session(self) -> None:
if self._write_stream is not None:
await self._write_stream.aclose()
except Exception as e:
# During cleanup, we catch all exceptions since streams might be in various states
logger.debug(f"Error closing streams: {e}")

async def _handle_unsupported_request(self, request: Request, send: Send) -> None:
Expand Down Expand Up @@ -765,8 +764,8 @@ async def send_event(event_message: EventMessage) -> None:
event_data = self._create_event_data(event_message)

await sse_stream_writer.send(event_data)
except Exception as e:
logger.exception(f"Error in replay sender: {e}")
except Exception:
logger.exception("Error in replay sender")

# Create and start EventSourceResponse
response = EventSourceResponse(
Expand All @@ -777,16 +776,16 @@ async def send_event(event_message: EventMessage) -> None:

try:
await response(request.scope, request.receive, send)
except Exception as e:
logger.exception(f"Error in replay response: {e}")
except Exception:
logger.exception("Error in replay response")
finally:
await sse_stream_writer.aclose()
await sse_stream_reader.aclose()

except Exception as e:
logger.exception(f"Error replaying events: {e}")
except Exception:
logger.exception("Error replaying events")
response = self._create_error_response(
f"Error replaying events: {str(e)}",
"Error replaying events",
HTTPStatus.INTERNAL_SERVER_ERROR,
INTERNAL_ERROR,
)
Expand Down Expand Up @@ -874,8 +873,8 @@ async def message_router():
for message. Still processing message as the client
might reconnect and replay."""
)
except Exception as e:
logger.exception(f"Error in message router: {e}")
except Exception:
logger.exception("Error in message router")

# Start the message router
tg.start_soon(message_router)
Expand All @@ -885,11 +884,7 @@ async def message_router():
yield read_stream, write_stream
finally:
for stream_id in list(self._request_streams.keys()):
try:
await self._clean_up_memory_streams(stream_id)
except Exception as e:
logger.debug(f"Error closing request stream: {e}")
pass
await self._clean_up_memory_streams(stream_id)
self._request_streams.clear()

# Clean up the read and write streams
Expand All @@ -899,4 +894,5 @@ async def message_router():
await write_stream_reader.aclose()
await write_stream.aclose()
except Exception as e:
# During cleanup, we catch all exceptions since streams might be in various states
logger.debug(f"Error closing streams: {e}")
Loading