refactor: re-implement pygls' builtin handlers using generators
The underlying cause of openlawlibrary#433 is that pygls' current implementation of
builtin feature handlers cannot guarantee that an async user handler
will finish executing before pygls responds with the answer generated
from the builtin handler.

This commit adds support for another execution model, generators.
A generator handler can yield to a sub-handler like so:

```
yield handler_func, args, kwargs
```

The `JsonRPCProtocol` class will then schedule the execution of
`handler_func(*args, **kwargs)` as if it were a normal handler
function (meaning `handler_func` could be async, threaded, sync or a
generator itself!)

The result of the sub-handler is then sent back into the generator
handler, allowing the top-level handler to continue and even make use
of the result!
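
For example, a generator handler that defers to a user handler and then
post-processes its result might look something like the following. This is a
minimal sketch, not the actual pygls implementation; `user_handler` and
`postprocess` are hypothetical stand-ins.

```
def builtin_handler(params):
    # Yield to the user's handler; pygls schedules it like any other
    # handler (async, threaded, sync or another generator) and sends
    # its result back into this generator once it completes.
    user_result = yield user_handler, (params,), {}

    # Execution resumes here with the user handler's result available,
    # so the builtin handler can post-process it before returning.
    return postprocess(user_result)
```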

This gives pygls' built-in handlers much greater control over exactly
when a user handler is called, allowing us to fix openlawlibrary#433 and
opening up a lot of other exciting possibilities!

This also removes the need for the `LSPMeta` metaclass, so it and the
corresponding module have been deleted.
alcarney committed Nov 30, 2024
1 parent b4580b8 commit 2d9513f
Showing 5 changed files with 173 additions and 146 deletions.
6 changes: 1 addition & 5 deletions pygls/protocol/__init__.py
@@ -1,7 +1,6 @@
import json
from typing import Any

from collections import namedtuple
from typing import Any

from lsprotocol import converters

@@ -12,7 +11,6 @@
JsonRPCResponseMessage,
)
from pygls.protocol.language_server import LanguageServerProtocol, lsp_method
from pygls.protocol.lsp_meta import LSPMeta, call_user_feature


def _dict_to_object(d: Any):
@@ -68,8 +66,6 @@ def default_converter():
"JsonRPCRequestMessage",
"JsonRPCResponseMessage",
"JsonRPCNotification",
"LSPMeta",
"call_user_feature",
"_dict_to_object",
"_params_field_structure_hook",
"_result_field_structure_hook",
113 changes: 92 additions & 21 deletions pygls/protocol/json_rpc.py
@@ -34,7 +34,6 @@
from lsprotocol.types import (
CANCEL_REQUEST,
EXIT,
WORKSPACE_EXECUTE_COMMAND,
ResponseError,
ResponseErrorMessage,
)
@@ -51,6 +50,8 @@
from pygls.feature_manager import FeatureManager, is_thread_function

if typing.TYPE_CHECKING:
from collections.abc import Generator

from cattrs import Converter

from pygls.io_ import AsyncWriter, Writer
@@ -130,8 +131,9 @@ def _execute_handler(
self,
msg_id: str | int,
handler: MessageHandler,
params: Any,
callback: MessageCallback,
args: tuple[Any, ...] | None = None,
kwargs: dict[str, Any] | None = None,
):
"""Execute the given message handler.
@@ -143,34 +145,103 @@
handler
The request handler to call
params
The parameters object to pass to the handler
callback
An optional callback function to call upon completion of the handler
args
Positional arguments to pass to the handler
kwargs
Keyword arguments to pass to the handler
"""

args = args or tuple()
kwargs = kwargs or {}

if asyncio.iscoroutinefunction(handler):
future = asyncio.ensure_future(handler(params))
future = asyncio.ensure_future(handler(*args, **kwargs))
self._request_futures[msg_id] = future
future.add_done_callback(callback)

elif is_thread_function(handler):
future = self._server.thread_pool.submit(handler, params)
future = self._server.thread_pool.submit(handler, *args, **kwargs)
self._request_futures[msg_id] = future
future.add_done_callback(callback)

elif inspect.isgeneratorfunction(handler):
future: Future[Any] = Future()
self._request_futures[msg_id] = future
future.add_done_callback(callback)

try:
self._run_generator(
future=None, gen=handler(*args, **kwargs), result_future=future
)
except Exception as exc:
future.set_exception(exc)

else:
# While a future is not necessary for a synchronous function, it allows us to use a single
# pattern across all handler types
future: Future[Any] = Future()
future.add_done_callback(callback)

try:
result = handler(params)
result = handler(*args, **kwargs)
future.set_result(result)
except Exception as exc:
future.set_exception(exc)

def _run_generator(
self,
future: Future[Any] | None,
*,
gen: Generator[Any, Any, Any],
result_future: Future[Any],
):
"""Run the next portion of the given generator.
Generator handlers are designed to ``yield`` to other handlers that are executed
separately before their results are sent back into the generator allowing
execution to continue.
Generator handlers are primarily used in the implementation of pygls' builtin
feature handlers.
Parameters
----------
future
The future that contains the result of the previously executed handler, if any
gen
The generator to run
result_future
The future to send the final result to once the generator stops.
"""

if result_future.cancelled():
return

try:
value = future.result() if future is not None else None
handler, args, kwargs = gen.send(value)

self._execute_handler(
str(uuid.uuid4()),
handler,
args=args,
kwargs=kwargs,
callback=partial(
self._run_generator, gen=gen, result_future=result_future
),
)
except StopIteration as result:
result_future.set_result(result.value)

except Exception as exc:
result_future.set_exception(exc)

def _send_handler_result(self, future: Future[Any], *, msg_id: str | int):
"""Callback function that sends the result of the given future to the client.
@@ -192,6 +263,7 @@ def _send_handler_result(self, future: Future[Any], *, msg_id: str | int):
error = JsonRpcInternalError.of(sys.exc_info())
logger.exception('Exception occurred for message "%s": %s', msg_id, error)
self._send_response(msg_id, error=error.to_response_error())
self._server._report_server_error(error, FeatureRequestError)

def _check_handler_result(self, future: Future[Any]):
"""Check the result of the future to see if an error occurred.
@@ -237,7 +309,10 @@ def _handle_notification(self, method_name, params):
try:
handler = self._get_handler(method_name)
self._execute_handler(
str(uuid.uuid4()), handler, params, self._check_handler_result
msg_id=str(uuid.uuid4()),
handler=handler,
args=(params,),
callback=self._check_handler_result,
)
except JsonRpcMethodNotFound:
logger.warning("Ignoring notification for unknown method %r", method_name)
@@ -255,16 +330,12 @@ def _handle_request(self, msg_id, method_name, params):
try:
handler = self._get_handler(method_name)

# workspace/executeCommand is a special case
if method_name == WORKSPACE_EXECUTE_COMMAND:
handler(params, msg_id)
else:
self._execute_handler(
msg_id,
handler,
params,
callback=partial(self._send_handler_result, msg_id=msg_id),
)
self._execute_handler(
msg_id=msg_id,
handler=handler,
args=(params,),
callback=partial(self._send_handler_result, msg_id=msg_id),
)

except JsonRpcMethodNotFound as error:
logger.warning(
@@ -369,10 +440,10 @@ def handle_message(self, message):

if hasattr(message, "method"):
if hasattr(message, "id"):
logger.debug("Request message received.")
logger.debug("Request %r received", message.method)
self._handle_request(message.id, message.method, message.params)
else:
logger.debug("Notification message received.")
logger.debug("Notification %r received", message.method)
self._handle_notification(message.method, message.params)
else:
if hasattr(message, "error"):
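
As an aside, the removal of the `WORKSPACE_EXECUTE_COMMAND` special case above
suggests that the builtin `workspace/executeCommand` handler can now be written
as a generator too. Below is a hypothetical sketch of what such a handler could
look like; the attribute and field names are assumptions, not the actual pygls
code.

```
def _execute_command(self, params):
    # Look up the handler registered for the requested command
    # (assumed to live in a ``commands`` mapping on the feature manager).
    cmd_handler = self.fm.commands[params.command]

    # Yield to the command handler; pygls schedules it like any other
    # handler and sends its result back into this generator.
    result = yield cmd_handler, (params.arguments,), {}

    # Whatever is returned here becomes the response to the original
    # workspace/executeCommand request.
    return result
```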