# web_protocol.py
  1. import asyncio
  2. import asyncio.streams
  3. import sys
  4. import traceback
  5. import warnings
  6. from collections import deque
  7. from contextlib import suppress
  8. from html import escape as html_escape
  9. from http import HTTPStatus
  10. from logging import Logger
  11. from typing import (
  12. TYPE_CHECKING,
  13. Any,
  14. Awaitable,
  15. Callable,
  16. Deque,
  17. Optional,
  18. Sequence,
  19. Tuple,
  20. Type,
  21. Union,
  22. cast,
  23. )
  24. import attr
  25. import yarl
  26. from propcache import under_cached_property
  27. from .abc import AbstractAccessLogger, AbstractStreamWriter
  28. from .base_protocol import BaseProtocol
  29. from .helpers import ceil_timeout
  30. from .http import (
  31. HttpProcessingError,
  32. HttpRequestParser,
  33. HttpVersion10,
  34. RawRequestMessage,
  35. StreamWriter,
  36. )
  37. from .http_exceptions import BadHttpMethod
  38. from .log import access_logger, server_logger
  39. from .streams import EMPTY_PAYLOAD, StreamReader
  40. from .tcp_helpers import tcp_keepalive
  41. from .web_exceptions import HTTPException, HTTPInternalServerError
  42. from .web_log import AccessLogger
  43. from .web_request import BaseRequest
  44. from .web_response import Response, StreamResponse
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")

if TYPE_CHECKING:
    import ssl

    from .web_server import Server


# Factory that builds a BaseRequest from a parsed message plus the payload
# stream, this protocol, the response writer, and the handler task.
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Application-level handler: takes a request, returns a response.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
# Placeholder message substituted for a request that failed to parse
# (see _make_error_handler / start): it lets the malformed request flow
# through the normal request pipeline so the error response is produced
# and logged by the usual code path.
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)
class RequestPayloadError(Exception):
    """Payload parsing error."""


class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


# Single shared instance set on request payloads once the response is
# finished (see start()), so late payload reads raise without allocating
# a new exception per request.
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    # Details of an HTTP parse failure, queued in place of a
    # RawRequestMessage so the error can be reported from the handler loop.
    status: int
    exc: BaseException
    message: str


# Items queued by data_received(): either a parsed request message or the
# _ErrInfo describing why parsing failed, plus the payload stream.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles incoming HTTP request. It reads request line,
    request headers and request payload and calls handle_request() method.
    By default it always returns with 404 response.

    RequestHandler handles errors in incoming request, like bad
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    timeout_ceil_threshold -- Optional value to specify
                              threshold to ceil() timeout
                              values
    """

    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_next_keepalive_close_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_handler_waiter",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
        "_timeout_ceil_threshold",
        "_request_in_progress",
        "_logging_enabled",
        "_cache",
    )

    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        # Default should be high enough that it's likely longer than a reverse proxy.
        keepalive_timeout: float = 3630,
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
        timeout_ceil_threshold: float = 5,
    ):
        super().__init__(loop)

        # _request_count is the number of requests processed with the same connection.
        self._request_count = 0
        self._keepalive = False
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._next_keepalive_close_time = 0.0
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)
        # Queue of parsed (or failed) messages awaiting the handler loop.
        self._messages: Deque[_MsgType] = deque()
        # Bytes received after an upgrade/while no payload parser is set.
        self._message_tail = b""

        self._waiter: Optional[asyncio.Future[None]] = None
        self._handler_waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )

        # Fall back to 5 if the provided threshold is not float-convertible.
        self._timeout_ceil_threshold: float = 5
        try:
            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
        except (TypeError, ValueError):
            pass

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )
            # Cached so the hot path can skip time bookkeeping when the
            # logger is disabled (see start() / log_access()).
            self._logging_enabled = self.access_logger.enabled
        else:
            self.access_logger = None
            self._logging_enabled = False

        self._close = False
        self._force_close = False
        self._request_in_progress = False
        self._cache: dict[str, Any] = {}

    def __repr__(self) -> str:
        return "<{} {}>".format(
            self.__class__.__name__,
            "connected" if self.transport is not None else "disconnected",
        )

    @under_cached_property
    def ssl_context(self) -> Optional["ssl.SSLContext"]:
        """Return SSLContext if available."""
        return (
            None
            if self.transport is None
            else self.transport.get_extra_info("sslcontext")
        )

    @under_cached_property
    def peername(
        self,
    ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]:
        """Return peername if available."""
        return (
            None
            if self.transport is None
            else self.transport.get_extra_info("peername")
        )

    @property
    def keepalive_timeout(self) -> float:
        # Seconds an idle keep-alive connection stays open.
        return self._keepalive_timeout

    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        # Wait for graceful handler completion
        if self._request_in_progress:
            # The future is only created when we are shutting
            # down while the handler is still processing a request
            # to avoid creating a future for every request.
            self._handler_waiter = self._loop.create_future()
            try:
                async with ceil_timeout(timeout):
                    await self._handler_waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._handler_waiter = None
                # Re-raise only when this task itself is being cancelled
                # (3.11+ exposes cancelling()); a plain timeout proceeds
                # to forceful cleanup below.
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        # Then cancel handler and wait
        try:
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    await asyncio.shield(self._task_handler)
        except (asyncio.CancelledError, asyncio.TimeoutError):
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        self.force_close()

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """Register the transport and spawn the request-handling task."""
        super().connection_made(transport)
        real_transport = cast(asyncio.Transport, transport)
        if self._tcp_keepalive:
            tcp_keepalive(real_transport)

        assert self._manager is not None
        self._manager.connection_made(self, real_transport)

        loop = self._loop
        if sys.version_info >= (3, 12):
            # eager_start runs the coroutine synchronously up to its first
            # suspension point (3.12+).
            task = asyncio.Task(self.start(), loop=loop, eager_start=True)
        else:
            task = loop.create_task(self.start())
        self._task_handler = task

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Tear down protocol state when the transport drops."""
        if self._manager is None:
            return
        self._manager.connection_lost(self, exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self.force_close()
        super().connection_lost(exc)
        self._manager = None
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None

    def set_parser(self, parser: Any) -> None:
        """Install a payload parser and replay any buffered tail bytes."""
        # Actual type is WebReader
        assert self._payload_parser is None

        self._payload_parser = parser

        if self._message_tail:
            self._payload_parser.feed_data(self._message_tail)
            self._message_tail = b""

    def eof_received(self) -> None:
        pass

    def data_received(self, data: bytes) -> None:
        """Feed incoming bytes to the request or payload parser."""
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Queue the parse failure as a 400 so it is reported via
                # the normal request pipeline.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                # no parser, just store
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()

    def keep_alive(self, val: bool) -> None:
        """Set keep-alive connection mode.

        :param bool val: new state.
        """
        self._keepalive = val
        if self._keepalive_handle:
            self._keepalive_handle.cancel()
            self._keepalive_handle = None

    def close(self) -> None:
        """Close connection.

        Stop accepting new pipelining messages and close
        connection when handlers done processing messages.
        """
        self._close = True
        if self._waiter:
            self._waiter.cancel()

    def force_close(self) -> None:
        """Forcefully close connection."""
        self._force_close = True
        if self._waiter:
            self._waiter.cancel()
        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: Optional[float]
    ) -> None:
        """Emit an access-log entry; ``time`` is the loop time at request start."""
        if self._logging_enabled and self.access_logger is not None:
            if TYPE_CHECKING:
                assert time is not None
            self.access_logger.log(request, response, self._loop.time() - time)

    def log_debug(self, *args: Any, **kw: Any) -> None:
        if self.debug:
            self.logger.debug(*args, **kw)

    def log_exception(self, *args: Any, **kw: Any) -> None:
        self.logger.exception(*args, **kw)

    def _process_keepalive(self) -> None:
        """Timer callback: close the connection once keep-alive expires."""
        self._keepalive_handle = None
        if self._force_close or not self._keepalive:
            return

        loop = self._loop
        now = loop.time()
        close_time = self._next_keepalive_close_time
        if now < close_time:
            # Keep alive close check fired too early, reschedule
            self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
            return

        # handler in idle state
        if self._waiter and not self._waiter.done():
            self.force_close()

    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: Optional[float],
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run the handler for one request and finish the response.

        Returns the response and whether the client disconnected
        prematurely (the ``reset`` flag from finish_response).
        """
        self._request_in_progress = True
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = exc
            resp, reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            resp, reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            resp, reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )
            resp, reset = await self.finish_response(request, resp, start_time)
        finally:
            self._request_in_progress = False
            if self._handler_waiter is not None:
                # Unblock shutdown() waiting for graceful completion.
                self._handler_waiter.set_result(None)

        return resp, reset

    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            # time is only fetched if logging is enabled as otherwise
            # its thrown away and never used.
            start = loop.time() if self._logging_enabled else None

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            # Important don't hold a reference to the current task
            # as on traceback it will prevent the task from being
            # collected and will cause a memory leak.
            request = self._request_factory(
                message,
                payload,
                self,
                writer,
                self._task_handler or asyncio.current_task(loop),  # type: ignore[arg-type]
            )
            try:
                # a new task is used for copy context vars (#3406)
                coro = self._handle_request(request, start, request_handler)
                if sys.version_info >= (3, 12):
                    # Eagerly execute the coroutine since we
                    # are already in the task context. (#8153)
                    task = asyncio.Task(coro, loop=loop, eager_start=True)
                else:
                    task = loop.create_task(coro)
                try:
                    resp, reset = await task
                except ConnectionError:
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        try:
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()
                        except (asyncio.CancelledError, asyncio.TimeoutError):
                            if (
                                sys.version_info >= (3, 11)
                                and (t := asyncio.current_task())
                                and t.cancelling()
                            ):
                                raise

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                payload.set_exception(_PAYLOAD_ACCESS_ERROR)
            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                self.force_close()
                raise
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            except BaseException:
                self.force_close()
                raise
            finally:
                request._task = None  # type: ignore[assignment] # Break reference cycle in case of exception
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")

            if self._keepalive and not self._close and not self._force_close:
                # start keep-alive timer
                close_time = loop.time() + keepalive_timeout
                self._next_keepalive_close_time = close_time
                if self._keepalive_handle is None:
                    self._keepalive_handle = loop.call_at(
                        close_time, self._process_keepalive
                    )
            else:
                break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()

    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float]
    ) -> Tuple[StreamResponse, bool]:
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.
        """
        request._finish()
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            # Handler returned something that is not a response instance
            # (or nothing at all); substitute a 500.
            if resp is None:
                self.log_exception("Missing return statement on request handler")
            else:
                self.log_exception(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
            exc = HTTPInternalServerError()
            resp = Response(
                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
            )
            prepare_meth = resp.prepare
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return resp, True

        self.log_access(request, resp, start_time)
        return resp, False

    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        if self._request_count == 1 and isinstance(exc, BadHttpMethod):
            # BadHttpMethod is common when a client sends non-HTTP
            # or encrypted traffic to an HTTP port. This is expected
            # to happen when connected to the public internet so we log
            # it at the debug level as to not fill logs with noise.
            self.logger.debug(
                "Error handling request from %s", request.remote, exc_info=exc
            )
        else:
            self.log_exception(
                "Error handling request from %s", request.remote, exc_info=exc
            )

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    # Escape the traceback before embedding it in HTML.
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp

    def _make_error_handler(
        self, err_info: _ErrInfo
    ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
        # Wrap a parse failure as a request handler so it flows through
        # the normal request pipeline (paired with the ERROR message).
        async def handler(request: BaseRequest) -> StreamResponse:
            return self.handle_error(
                request, err_info.status, err_info.exc, err_info.message
            )

        return handler