From e1002d5affaca5c9e567db0c2d1bed8c1b8fd384 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 21 Nov 2024 14:06:29 -0500 Subject: [PATCH 01/38] adding support for async callbacks and page layouts --- dash/_callback.py | 14 +++++--- dash/dash.py | 86 ++++++++++++++++++++++++++++------------------- 2 files changed, 62 insertions(+), 38 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 071c209dec..03111cfddc 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -4,6 +4,7 @@ from typing import Callable, Optional, Any import flask +import asyncio from .dependencies import ( handle_callback_args, @@ -39,8 +40,13 @@ from ._callback_context import context_value -def _invoke_callback(func, *args, **kwargs): # used to mark the frame for the debugger - return func(*args, **kwargs) # %% callback invoked %% +async def _invoke_callback(func, *args, **kwargs): + # Check if the function is a coroutine function + if asyncio.iscoroutinefunction(func): + return await func(*args, **kwargs) + else: + # If the function is not a coroutine, call it directly + return func(*args, **kwargs) class NoUpdate: @@ -353,7 +359,7 @@ def wrap_func(func): ) @wraps(func) - def add_context(*args, **kwargs): + async def add_context(*args, **kwargs): output_spec = kwargs.pop("outputs_list") app_callback_manager = kwargs.pop("long_callback_manager", None) @@ -493,7 +499,7 @@ def add_context(*args, **kwargs): return to_json(response) else: try: - output_value = _invoke_callback(func, *func_args, **func_kwargs) + output_value = await _invoke_callback(func, *func_args, **func_kwargs) except PreventUpdate as err: raise err except Exception as err: # pylint: disable=broad-exception-caught diff --git a/dash/dash.py b/dash/dash.py index 3ad375c823..e783ca93be 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -19,6 +19,7 @@ from typing import Any, Callable, Dict, Optional, Union, List import flask +import asyncio from importlib_metadata import version as _get_distribution_version @@ -191,6 +192,14 @@ def _do_skip(error): # Singleton signal to not update an output, alternative to PreventUpdate no_update = _callback.NoUpdate() # pylint: disable=protected-access +async def execute_async_function(func, *args, **kwargs): + # Check if the function is a coroutine function + if asyncio.iscoroutinefunction(func): + return await func(*args, **kwargs) + else: + # If the function is not a coroutine, call it directly + return func(*args, **kwargs) + # pylint: disable=too-many-instance-attributes # pylint: disable=too-many-arguments, too-many-locals @@ -1267,7 +1276,7 @@ def long_callback( ) # pylint: disable=R0915 - def dispatch(self): + async def dispatch(self): body = flask.request.get_json() g = AttributeDict({}) @@ -1371,19 +1380,27 @@ def dispatch(self): raise KeyError(msg) from missing_callback_function ctx = copy_context() + # Create a partial function with the necessary arguments # noinspection PyArgumentList + partial_func = functools.partial( + execute_async_function, + func, + *args, + outputs_list=outputs_list, + long_callback_manager=self._background_manager, + callback_context=g, + app=self, + app_on_error=self._on_error, + ) + + response_data = await ctx.run(partial_func) + + # Check if the response is a coroutine + if asyncio.iscoroutine(response_data): + response_data = await response_data + response.set_data( - ctx.run( - functools.partial( - func, - *args, - outputs_list=outputs_list, - long_callback_manager=self._background_manager, - callback_context=g, - app=self, - 
app_on_error=self._on_error, - ) - ) + response_data ) return response @@ -2206,7 +2223,7 @@ def router(): inputs=inputs, prevent_initial_call=True, ) - def update(pathname_, search_, **states): + async def update(pathname_, search_, **states): """ Updates dash.page_container layout on page navigation. Updates the stored page title which will trigger the clientside callback to update the app title @@ -2231,27 +2248,28 @@ def update(pathname_, search_, **states): layout = page.get("layout", "") title = page["title"] - if callable(layout): - layout = ( - layout(**path_variables, **query_parameters, **states) - if path_variables - else layout(**query_parameters, **states) - ) - if callable(title): - title = title(**path_variables) if path_variables else title() - - return layout, {"title": title} - - _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) - _validate.validate_registry(_pages.PAGE_REGISTRY) - - # Set validation_layout - if not self.config.suppress_callback_exceptions: - self.validation_layout = html.Div( - [ - page["layout"]() if callable(page["layout"]) else page["layout"] - for page in _pages.PAGE_REGISTRY.values() - ] + if callable(layout): + layout = await execute_async_function(layout, + **{**(path_variables or {}), **query_parameters, **states} + ) + if callable(title): + title = await execute_async_function(title, + **(path_variables or {}) + ) + + return layout, {"title": title} + + _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) + _validate.validate_registry(_pages.PAGE_REGISTRY) + + # Set validation_layout + if not self.config.suppress_callback_exceptions: + self.validation_layout = html.Div( + [ + asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ + "layout"] + for page in _pages.PAGE_REGISTRY.values() + ] + [ # pylint: disable=not-callable self.layout() From e24e094b16fcc7c333fc32e04cd11859b6ffd727 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 21 Nov 2024 14:59:30 -0500 Subject: [PATCH 02/38] adding new `use_async` attribute to `Dash` and having callbacks and layouts reference this in order to determine whether or not they should load as async functions. 
--- dash/_callback.py | 237 +++++++++++++++++++++++++++++++++- dash/dash.py | 314 +++++++++++++++++++++++++++++++++++++--------- 2 files changed, 488 insertions(+), 63 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 03111cfddc..fe1263031c 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -40,7 +40,7 @@ from ._callback_context import context_value -async def _invoke_callback(func, *args, **kwargs): +async def _async_invoke_callback(func, *args, **kwargs): # Check if the function is a coroutine function if asyncio.iscoroutinefunction(func): return await func(*args, **kwargs) @@ -48,6 +48,9 @@ async def _invoke_callback(func, *args, **kwargs): # If the function is not a coroutine, call it directly return func(*args, **kwargs) +def _invoke_callback(func, *args, **kwargs): + return func(*args, **kwargs) + class NoUpdate: def to_plotly_json(self): # pylint: disable=no-self-use @@ -321,6 +324,7 @@ def register_callback( manager = _kwargs.get("manager") running = _kwargs.get("running") on_error = _kwargs.get("on_error") + use_async = _kwargs.get("app_use_async") if running is not None: if not isinstance(running[0], (list, tuple)): running = [running] @@ -359,7 +363,229 @@ def wrap_func(func): ) @wraps(func) - async def add_context(*args, **kwargs): + async def async_add_context(*args, **kwargs): + output_spec = kwargs.pop("outputs_list") + app_callback_manager = kwargs.pop("long_callback_manager", None) + + callback_ctx = kwargs.pop( + "callback_context", AttributeDict({"updated_props": {}}) + ) + app = kwargs.pop("app", None) + callback_manager = long and long.get("manager", app_callback_manager) + error_handler = on_error or kwargs.pop("app_on_error", None) + original_packages = set(ComponentRegistry.registry) + + if has_output: + _validate.validate_output_spec(insert_output, output_spec, Output) + + context_value.set(callback_ctx) + + func_args, func_kwargs = _validate.validate_and_group_input_args( + args, inputs_state_indices + ) + + response: dict = {"multi": True} + has_update = False + + if long is not None: + if not callback_manager: + raise MissingLongCallbackManagerError( + "Running `long` callbacks requires a manager to be installed.\n" + "Available managers:\n" + "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a separate Process" + " and store results on the local filesystem.\n" + "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" + " and store results on redis.\n" + ) + + progress_outputs = long.get("progress") + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") + old_job = flask.request.args.getlist("oldJob") + + current_key = callback_manager.build_cache_key( + func, + # Inputs provided as dict is kwargs. 
+ func_args if func_args else func_kwargs, + long.get("cache_args_to_ignore", []), + ) + + if old_job: + for job in old_job: + callback_manager.terminate_job(job) + + if not cache_key: + cache_key = current_key + + job_fn = callback_manager.func_registry.get(long_key) + + ctx_value = AttributeDict(**context_value.get()) + ctx_value.ignore_register_page = True + ctx_value.pop("background_callback_manager") + ctx_value.pop("dash_response") + + job = callback_manager.call_job_fn( + cache_key, + job_fn, + func_args if func_args else func_kwargs, + ctx_value, + ) + + data = { + "cacheKey": cache_key, + "job": job, + } + + cancel = long.get("cancel") + if cancel: + data["cancel"] = cancel + + progress_default = long.get("progressDefault") + if progress_default: + data["progressDefault"] = { + str(o): x + for o, x in zip(progress_outputs, progress_default) + } + return to_json(data) + if progress_outputs: + # Get the progress before the result as it would be erased after the results. + progress = callback_manager.get_progress(cache_key) + if progress: + response["progress"] = { + str(x): progress[i] for i, x in enumerate(progress_outputs) + } + + output_value = callback_manager.get_result(cache_key, job_id) + # Must get job_running after get_result since get_results terminates it. + job_running = callback_manager.job_running(job_id) + if not job_running and output_value is callback_manager.UNDEFINED: + # Job canceled -> no output to close the loop. + output_value = NoUpdate() + + elif ( + isinstance(output_value, dict) + and "long_callback_error" in output_value + ): + error = output_value.get("long_callback_error", {}) + exc = LongCallbackError( + f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" + ) + if error_handler: + output_value = error_handler(exc) + + if output_value is None: + output_value = NoUpdate() + # set_props from the error handler uses the original ctx + # instead of manager.get_updated_props since it runs in the + # request process. + has_update = ( + _set_side_update(callback_ctx, response) + or output_value is not None + ) + else: + raise exc + + if job_running and output_value is not callback_manager.UNDEFINED: + # cached results. + callback_manager.terminate_job(job_id) + + if multi and isinstance(output_value, (list, tuple)): + output_value = [ + NoUpdate() if NoUpdate.is_no_update(r) else r + for r in output_value + ] + updated_props = callback_manager.get_updated_props(cache_key) + if len(updated_props) > 0: + response["sideUpdate"] = updated_props + has_update = True + + if output_value is callback_manager.UNDEFINED: + return to_json(response) + else: + try: + output_value = await _async_invoke_callback(func, *func_args, **func_kwargs) + except PreventUpdate as err: + raise err + except Exception as err: # pylint: disable=broad-exception-caught + if error_handler: + output_value = error_handler(err) + + # If the error returns nothing, automatically puts NoUpdate for response. 
+ if output_value is None and has_output: + output_value = NoUpdate() + else: + raise err + + component_ids = collections.defaultdict(dict) + + if has_output: + if not multi: + output_value, output_spec = [output_value], [output_spec] + flat_output_values = output_value + else: + if isinstance(output_value, (list, tuple)): + # For multi-output, allow top-level collection to be + # list or tuple + output_value = list(output_value) + + if NoUpdate.is_no_update(output_value): + flat_output_values = [output_value] + else: + # Flatten grouping and validate grouping structure + flat_output_values = flatten_grouping(output_value, output) + + if not NoUpdate.is_no_update(output_value): + _validate.validate_multi_return( + output_spec, flat_output_values, callback_id + ) + + for val, spec in zip(flat_output_values, output_spec): + if NoUpdate.is_no_update(val): + continue + for vali, speci in ( + zip(val, spec) if isinstance(spec, list) else [[val, spec]] + ): + if not NoUpdate.is_no_update(vali): + has_update = True + id_str = stringify_id(speci["id"]) + prop = clean_property_name(speci["property"]) + component_ids[id_str][prop] = vali + else: + if output_value is not None: + raise InvalidCallbackReturnValue( + f"No-output callback received return value: {output_value}" + ) + output_value = [] + flat_output_values = [] + + if not long: + has_update = _set_side_update(callback_ctx, response) or has_update + + if not has_update: + raise PreventUpdate + + response["response"] = component_ids + + if len(ComponentRegistry.registry) != len(original_packages): + diff_packages = list( + set(ComponentRegistry.registry).difference(original_packages) + ) + if not allow_dynamic_callbacks: + raise ImportedInsideCallbackError( + f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" + "You can set `_allow_dynamic_callbacks` to allow for development purpose only." + ) + dist = app.get_dist(diff_packages) + response["dist"] = dist + + try: + jsonResponse = to_json(response) + except TypeError: + _validate.fail_callback_output(output_value, output) + + return jsonResponse + + def add_context(*args, **kwargs): output_spec = kwargs.pop("outputs_list") app_callback_manager = kwargs.pop("long_callback_manager", None) @@ -499,7 +725,7 @@ async def add_context(*args, **kwargs): return to_json(response) else: try: - output_value = await _invoke_callback(func, *func_args, **func_kwargs) + output_value = _invoke_callback(func, *func_args, **func_kwargs) except PreventUpdate as err: raise err except Exception as err: # pylint: disable=broad-exception-caught @@ -581,7 +807,10 @@ async def add_context(*args, **kwargs): return jsonResponse - callback_map[callback_id]["callback"] = add_context + if use_async: + callback_map[callback_id]["callback"] = async_add_context + else: + callback_map[callback_id]["callback"] = add_context return func diff --git a/dash/dash.py b/dash/dash.py index e783ca93be..df9638aa31 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -384,6 +384,10 @@ class Dash: an exception is raised. Receives the exception object as first argument. The callback_context can be used to access the original callback inputs, states and output. + + :param use_async: When True, the app will create async endpoints, as a dev, + they will be responsible for installing the `flask[async]` dependency. 
+ :type use_async: boolean """ _plotlyjs_url: str @@ -431,6 +435,7 @@ def __init__( # pylint: disable=too-many-statements routing_callback_inputs: Optional[Dict[str, Union[Input, State]]] = None, description: Optional[str] = None, on_error: Optional[Callable[[Exception], Any]] = None, + use_async: Optional[bool] = False, **obsolete, ): _validate.check_obsolete(obsolete) @@ -544,6 +549,7 @@ def __init__( # pylint: disable=too-many-statements self.validation_layout = None self._on_error = on_error self._extra_components = [] + self._use_async = use_async self._setup_dev_tools() self._hot_reload = AttributeDict( @@ -681,7 +687,10 @@ def _setup_routes(self): ) self._add_url("_dash-layout", self.serve_layout) self._add_url("_dash-dependencies", self.dependencies) - self._add_url("_dash-update-component", self.dispatch, ["POST"]) + if self._use_async: + self._add_url("_dash-update-component", self.async_dispatch, ["POST"]) + else: + self._add_url("_dash-update-component", self.dispatch, ["POST"]) self._add_url("_reload-hash", self.serve_reload_hash) self._add_url("_favicon.ico", self._serve_default_favicon) self._add_url("", self.index) @@ -1276,7 +1285,7 @@ def long_callback( ) # pylint: disable=R0915 - async def dispatch(self): + async def async_dispatch(self): body = flask.request.get_json() g = AttributeDict({}) @@ -1391,6 +1400,7 @@ async def dispatch(self): callback_context=g, app=self, app_on_error=self._on_error, + app_use_async=self._use_async, ) response_data = await ctx.run(partial_func) @@ -1404,6 +1414,130 @@ async def dispatch(self): ) return response + def dispatch(self): + body = flask.request.get_json() + + g = AttributeDict({}) + + g.inputs_list = inputs = body.get( # pylint: disable=assigning-non-slot + "inputs", [] + ) + g.states_list = state = body.get( # pylint: disable=assigning-non-slot + "state", [] + ) + output = body["output"] + outputs_list = body.get("outputs") + g.outputs_list = outputs_list # pylint: disable=assigning-non-slot + + g.input_values = ( # pylint: disable=assigning-non-slot + input_values + ) = inputs_to_dict(inputs) + g.state_values = inputs_to_dict(state) # pylint: disable=assigning-non-slot + changed_props = body.get("changedPropIds", []) + g.triggered_inputs = [ # pylint: disable=assigning-non-slot + {"prop_id": x, "value": input_values.get(x)} for x in changed_props + ] + + response = ( + g.dash_response # pylint: disable=assigning-non-slot + ) = flask.Response(mimetype="application/json") + + args = inputs_to_vals(inputs + state) + + try: + cb = self.callback_map[output] + func = cb["callback"] + g.background_callback_manager = ( + cb.get("manager") or self._background_manager + ) + g.ignore_register_page = cb.get("long", False) + + # Add args_grouping + inputs_state_indices = cb["inputs_state_indices"] + inputs_state = inputs + state + inputs_state = convert_to_AttributeDict(inputs_state) + + if cb.get("no_output"): + outputs_list = [] + elif not outputs_list: + # FIXME Old renderer support? 
+ split_callback_id(output) + + # update args_grouping attributes + for s in inputs_state: + # check for pattern matching: list of inputs or state + if isinstance(s, list): + for pattern_match_g in s: + update_args_group(pattern_match_g, changed_props) + update_args_group(s, changed_props) + + args_grouping = map_grouping( + lambda ind: inputs_state[ind], inputs_state_indices + ) + + g.args_grouping = args_grouping # pylint: disable=assigning-non-slot + g.using_args_grouping = ( # pylint: disable=assigning-non-slot + not isinstance(inputs_state_indices, int) + and ( + inputs_state_indices + != list(range(grouping_len(inputs_state_indices))) + ) + ) + + # Add outputs_grouping + outputs_indices = cb["outputs_indices"] + if not isinstance(outputs_list, list): + flat_outputs = [outputs_list] + else: + flat_outputs = outputs_list + + if len(flat_outputs) > 0: + outputs_grouping = map_grouping( + lambda ind: flat_outputs[ind], outputs_indices + ) + g.outputs_grouping = ( + outputs_grouping # pylint: disable=assigning-non-slot + ) + g.using_outputs_grouping = ( # pylint: disable=assigning-non-slot + not isinstance(outputs_indices, int) + and outputs_indices != list(range(grouping_len(outputs_indices))) + ) + else: + g.outputs_grouping = [] + g.using_outputs_grouping = [] + g.updated_props = {} + + g.cookies = dict(**flask.request.cookies) + g.headers = dict(**flask.request.headers) + g.path = flask.request.full_path + g.remote = flask.request.remote_addr + g.origin = flask.request.origin + + except KeyError as missing_callback_function: + msg = f"Callback function not found for output '{output}', perhaps you forgot to prepend the '@'?" + raise KeyError(msg) from missing_callback_function + + ctx = copy_context() + # Create a partial function with the necessary arguments + # noinspection PyArgumentList + partial_func = functools.partial( + func, + *args, + outputs_list=outputs_list, + long_callback_manager=self._background_manager, + callback_context=g, + app=self, + app_on_error=self._on_error, + app_use_async=self._use_async + ) + + response_data = ctx.run(partial_func) + + response.set_data( + response_data + ) + return response + def _setup_server(self): if self._got_first_request["setup_server"]: return @@ -2217,68 +2351,130 @@ def router(): } inputs.update(self.routing_callback_inputs) - @self.callback( - Output(_ID_CONTENT, "children"), - Output(_ID_STORE, "data"), - inputs=inputs, - prevent_initial_call=True, - ) - async def update(pathname_, search_, **states): - """ - Updates dash.page_container layout on page navigation. - Updates the stored page title which will trigger the clientside callback to update the app title - """ - - query_parameters = _parse_query_string(search_) - page, path_variables = _path_to_page( - self.strip_relative_path(pathname_) + if self._use_async: + @self.callback( + Output(_ID_CONTENT, "children"), + Output(_ID_STORE, "data"), + inputs=inputs, + prevent_initial_call=True, ) + async def update(pathname_, search_, **states): + """ + Updates dash.page_container layout on page navigation. 
+ Updates the stored page title which will trigger the clientside callback to update the app title + """ + + query_parameters = _parse_query_string(search_) + page, path_variables = _path_to_page( + self.strip_relative_path(pathname_) + ) - # get layout - if page == {}: - for module, page in _pages.PAGE_REGISTRY.items(): - if module.split(".")[-1] == "not_found_404": - layout = page["layout"] - title = page["title"] - break + # get layout + if page == {}: + for module, page in _pages.PAGE_REGISTRY.items(): + if module.split(".")[-1] == "not_found_404": + layout = page["layout"] + title = page["title"] + break + else: + layout = html.H1("404 - Page not found") + title = self.title else: - layout = html.H1("404 - Page not found") - title = self.title - else: - layout = page.get("layout", "") - title = page["title"] - - if callable(layout): - layout = await execute_async_function(layout, - **{**(path_variables or {}), **query_parameters, **states} - ) - if callable(title): - title = await execute_async_function(title, - **(path_variables or {}) - ) - - return layout, {"title": title} - - _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) - _validate.validate_registry(_pages.PAGE_REGISTRY) - - # Set validation_layout - if not self.config.suppress_callback_exceptions: - self.validation_layout = html.Div( - [ - asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ - "layout"] - for page in _pages.PAGE_REGISTRY.values() + layout = page.get("layout", "") + title = page["title"] + + if callable(layout): + layout = await execute_async_function(layout, + **{**(path_variables or {}), **query_parameters, **states} + ) + if callable(title): + title = await execute_async_function(title, + **(path_variables or {}) + ) + + return layout, {"title": title} + + _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) + _validate.validate_registry(_pages.PAGE_REGISTRY) + + # Set validation_layout + if not self.config.suppress_callback_exceptions: + self.validation_layout = html.Div( + [ + asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ + "layout"] + for page in _pages.PAGE_REGISTRY.values() + ] + + [ + # pylint: disable=not-callable + self.layout() + if callable(self.layout) + else self.layout ] - + [ - # pylint: disable=not-callable - self.layout() - if callable(self.layout) - else self.layout - ] + ) + if _ID_CONTENT not in self.validation_layout: + raise Exception("`dash.page_container` not found in the layout") + else: + @self.callback( + Output(_ID_CONTENT, "children"), + Output(_ID_STORE, "data"), + inputs=inputs, + prevent_initial_call=True, ) - if _ID_CONTENT not in self.validation_layout: - raise Exception("`dash.page_container` not found in the layout") + def update(pathname_, search_, **states): + """ + Updates dash.page_container layout on page navigation. 
+ Updates the stored page title which will trigger the clientside callback to update the app title + """ + + query_parameters = _parse_query_string(search_) + page, path_variables = _path_to_page( + self.strip_relative_path(pathname_) + ) + + # get layout + if page == {}: + for module, page in _pages.PAGE_REGISTRY.items(): + if module.split(".")[-1] == "not_found_404": + layout = page["layout"] + title = page["title"] + break + else: + layout = html.H1("404 - Page not found") + title = self.title + else: + layout = page.get("layout", "") + title = page["title"] + + if callable(layout): + layout = layout(**{**(path_variables or {}), **query_parameters, + **states}) + if callable(title): + title = title(**(path_variables or {})) + + return layout, {"title": title} + + _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) + _validate.validate_registry(_pages.PAGE_REGISTRY) + + # Set validation_layout + if not self.config.suppress_callback_exceptions: + self.validation_layout = html.Div( + [ + page["layout"]() if callable(page["layout"]) else + page[ + "layout"] + for page in _pages.PAGE_REGISTRY.values() + ] + + [ + # pylint: disable=not-callable + self.layout() + if callable(self.layout) + else self.layout + ] + ) + if _ID_CONTENT not in self.validation_layout: + raise Exception("`dash.page_container` not found in the layout") # Update the page title on page navigation self.clientside_callback( From 5d492dc6d613db5b173b53c08cff3330b315ee54 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:37:34 -0500 Subject: [PATCH 03/38] fixing issue with indentations of layouts --- dash/_callback.py | 6 ++- dash/dash.py | 119 +++++++++++++++++++++++----------------------- 2 files changed, 65 insertions(+), 60 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index fe1263031c..599148a50a 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -80,6 +80,7 @@ def callback( manager=None, cache_args_to_ignore=None, on_error: Optional[Callable[[Exception], Any]] = None, + use_async=False, **_kwargs, ): """ @@ -154,6 +155,8 @@ def callback( Function to call when the callback raises an exception. Receives the exception object as first argument. The callback_context can be used to access the original callback inputs, states and output. + :param use_async: + Tells the system to await for this async callback. 
""" long_spec = None @@ -204,6 +207,7 @@ def callback( manager=manager, running=running, on_error=on_error, + use_async=use_async ) @@ -324,7 +328,7 @@ def register_callback( manager = _kwargs.get("manager") running = _kwargs.get("running") on_error = _kwargs.get("on_error") - use_async = _kwargs.get("app_use_async") + use_async = _kwargs.get("use_async") if running is not None: if not isinstance(running[0], (list, tuple)): running = [running] diff --git a/dash/dash.py b/dash/dash.py index df9638aa31..66e585f468 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -2357,6 +2357,7 @@ def router(): Output(_ID_STORE, "data"), inputs=inputs, prevent_initial_call=True, + use_async=True ) async def update(pathname_, search_, **states): """ @@ -2383,37 +2384,37 @@ async def update(pathname_, search_, **states): layout = page.get("layout", "") title = page["title"] - if callable(layout): - layout = await execute_async_function(layout, - **{**(path_variables or {}), **query_parameters, **states} - ) - if callable(title): - title = await execute_async_function(title, - **(path_variables or {}) - ) - - return layout, {"title": title} - - _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) - _validate.validate_registry(_pages.PAGE_REGISTRY) - - # Set validation_layout - if not self.config.suppress_callback_exceptions: - self.validation_layout = html.Div( - [ - asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ - "layout"] - for page in _pages.PAGE_REGISTRY.values() - ] - + [ - # pylint: disable=not-callable - self.layout() - if callable(self.layout) - else self.layout + if callable(layout): + layout = await execute_async_function(layout, + **{**(path_variables or {}), **query_parameters, **states} + ) + if callable(title): + title = await execute_async_function(title, + **(path_variables or {}) + ) + + return layout, {"title": title} + + _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) + _validate.validate_registry(_pages.PAGE_REGISTRY) + + # Set validation_layout + if not self.config.suppress_callback_exceptions: + self.validation_layout = html.Div( + [ + asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ + "layout"] + for page in _pages.PAGE_REGISTRY.values() ] - ) - if _ID_CONTENT not in self.validation_layout: - raise Exception("`dash.page_container` not found in the layout") + + [ + # pylint: disable=not-callable + self.layout() + if callable(self.layout) + else self.layout + ] + ) + if _ID_CONTENT not in self.validation_layout: + raise Exception("`dash.page_container` not found in the layout") else: @self.callback( Output(_ID_CONTENT, "children"), @@ -2446,35 +2447,35 @@ def update(pathname_, search_, **states): layout = page.get("layout", "") title = page["title"] - if callable(layout): - layout = layout(**{**(path_variables or {}), **query_parameters, - **states}) - if callable(title): - title = title(**(path_variables or {})) - - return layout, {"title": title} - - _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) - _validate.validate_registry(_pages.PAGE_REGISTRY) - - # Set validation_layout - if not self.config.suppress_callback_exceptions: - self.validation_layout = html.Div( - [ - page["layout"]() if callable(page["layout"]) else - page[ - "layout"] - for page in _pages.PAGE_REGISTRY.values() - ] - + [ - # pylint: disable=not-callable - self.layout() - if callable(self.layout) - else self.layout - ] - ) - if _ID_CONTENT not in self.validation_layout: - raise 
Exception("`dash.page_container` not found in the layout") + if callable(layout): + layout = layout(**{**(path_variables or {}), **query_parameters, + **states}) + if callable(title): + title = title(**(path_variables or {})) + + return layout, {"title": title} + + _validate.check_for_duplicate_pathnames(_pages.PAGE_REGISTRY) + _validate.validate_registry(_pages.PAGE_REGISTRY) + + # Set validation_layout + if not self.config.suppress_callback_exceptions: + self.validation_layout = html.Div( + [ + page["layout"]() if callable(page["layout"]) else + page[ + "layout"] + for page in _pages.PAGE_REGISTRY.values() + ] + + [ + # pylint: disable=not-callable + self.layout() + if callable(self.layout) + else self.layout + ] + ) + if _ID_CONTENT not in self.validation_layout: + raise Exception("`dash.page_container` not found in the layout") # Update the page title on page navigation self.clientside_callback( From 69ee0690d831bfc705700b93a0826355d3263dca Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:51:38 -0500 Subject: [PATCH 04/38] removing needing to classify the function as async, instead I am looking at the function to determine --- dash/_callback.py | 6 +----- dash/dash.py | 3 +-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 599148a50a..5270e1e53e 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -155,8 +155,6 @@ def callback( Function to call when the callback raises an exception. Receives the exception object as first argument. The callback_context can be used to access the original callback inputs, states and output. - :param use_async: - Tells the system to await for this async callback. """ long_spec = None @@ -207,7 +205,6 @@ def callback( manager=manager, running=running, on_error=on_error, - use_async=use_async ) @@ -328,7 +325,6 @@ def register_callback( manager = _kwargs.get("manager") running = _kwargs.get("running") on_error = _kwargs.get("on_error") - use_async = _kwargs.get("use_async") if running is not None: if not isinstance(running[0], (list, tuple)): running = [running] @@ -811,7 +807,7 @@ def add_context(*args, **kwargs): return jsonResponse - if use_async: + if asyncio.iscoroutinefunction(func): callback_map[callback_id]["callback"] = async_add_context else: callback_map[callback_id]["callback"] = add_context diff --git a/dash/dash.py b/dash/dash.py index 66e585f468..058acc5566 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -2356,8 +2356,7 @@ def router(): Output(_ID_CONTENT, "children"), Output(_ID_STORE, "data"), inputs=inputs, - prevent_initial_call=True, - use_async=True + prevent_initial_call=True ) async def update(pathname_, search_, **states): """ From 950888cdcc9044c4387b673a3450149e393e379b Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 09:32:11 -0500 Subject: [PATCH 05/38] fixing for lint --- dash/_callback.py | 5 +++- dash/dash.py | 58 ++++++++++++++++++++++++----------------------- 2 files changed, 34 insertions(+), 29 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 5270e1e53e..2dde2f23a6 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -48,6 +48,7 @@ async def _async_invoke_callback(func, *args, **kwargs): # If the function is not a coroutine, call it directly return func(*args, **kwargs) + def _invoke_callback(func, *args, **kwargs): return func(*args, **kwargs) @@ -503,7 +504,9 @@ async def async_add_context(*args, **kwargs): return 
to_json(response) else: try: - output_value = await _async_invoke_callback(func, *func_args, **func_kwargs) + output_value = await _async_invoke_callback( + func, *func_args, **func_kwargs + ) except PreventUpdate as err: raise err except Exception as err: # pylint: disable=broad-exception-caught diff --git a/dash/dash.py b/dash/dash.py index 058acc5566..60a797c2f0 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -192,6 +192,7 @@ def _do_skip(error): # Singleton signal to not update an output, alternative to PreventUpdate no_update = _callback.NoUpdate() # pylint: disable=protected-access + async def execute_async_function(func, *args, **kwargs): # Check if the function is a coroutine function if asyncio.iscoroutinefunction(func): @@ -1409,9 +1410,7 @@ async def async_dispatch(self): if asyncio.iscoroutine(response_data): response_data = await response_data - response.set_data( - response_data - ) + response.set_data(response_data) return response def dispatch(self): @@ -1528,14 +1527,12 @@ def dispatch(self): callback_context=g, app=self, app_on_error=self._on_error, - app_use_async=self._use_async + app_use_async=self._use_async, ) response_data = ctx.run(partial_func) - response.set_data( - response_data - ) + response.set_data(response_data) return response def _setup_server(self): @@ -2352,11 +2349,12 @@ def router(): inputs.update(self.routing_callback_inputs) if self._use_async: + @self.callback( Output(_ID_CONTENT, "children"), Output(_ID_STORE, "data"), inputs=inputs, - prevent_initial_call=True + prevent_initial_call=True, ) async def update(pathname_, search_, **states): """ @@ -2384,13 +2382,14 @@ async def update(pathname_, search_, **states): title = page["title"] if callable(layout): - layout = await execute_async_function(layout, - **{**(path_variables or {}), **query_parameters, **states} - ) + layout = await execute_async_function( + layout, + **{**(path_variables or {}), **query_parameters, **states}, + ) if callable(title): - title = await execute_async_function(title, - **(path_variables or {}) - ) + title = await execute_async_function( + title, **(path_variables or {}) + ) return layout, {"title": title} @@ -2401,20 +2400,22 @@ async def update(pathname_, search_, **states): if not self.config.suppress_callback_exceptions: self.validation_layout = html.Div( [ - asyncio.run(execute_async_function(page["layout"])) if callable(page["layout"]) else page[ - "layout"] + asyncio.run(execute_async_function(page["layout"])) + if callable(page["layout"]) + else page["layout"] for page in _pages.PAGE_REGISTRY.values() ] - + [ - # pylint: disable=not-callable - self.layout() - if callable(self.layout) - else self.layout - ] - ) + + [ + # pylint: disable=not-callable + self.layout() + if callable(self.layout) + else self.layout + ] + ) if _ID_CONTENT not in self.validation_layout: raise Exception("`dash.page_container` not found in the layout") else: + @self.callback( Output(_ID_CONTENT, "children"), Output(_ID_STORE, "data"), @@ -2447,8 +2448,9 @@ def update(pathname_, search_, **states): title = page["title"] if callable(layout): - layout = layout(**{**(path_variables or {}), **query_parameters, - **states}) + layout = layout( + **{**(path_variables or {}), **query_parameters, **states} + ) if callable(title): title = title(**(path_variables or {})) @@ -2461,9 +2463,9 @@ def update(pathname_, search_, **states): if not self.config.suppress_callback_exceptions: self.validation_layout = html.Div( [ - page["layout"]() if callable(page["layout"]) else - page[ - "layout"] + 
page["layout"]() + if callable(page["layout"]) + else page["layout"] for page in _pages.PAGE_REGISTRY.values() ] + [ From 916efdc5df1ce33ff9acfe65dd034a9d3ea5f2cd Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 10:59:53 -0500 Subject: [PATCH 06/38] adjustments for the test for the debugger, making test more robust for testing and not relying on a specific string to be present as a comment --- dash/_callback.py | 12 +++++++----- dash/dash.py | 12 ++++++++++-- .../devtools/test_devtools_error_handling.py | 8 ++++---- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 2dde2f23a6..f74b7928bb 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -40,17 +40,19 @@ from ._callback_context import context_value -async def _async_invoke_callback(func, *args, **kwargs): +async def _async_invoke_callback( + func, *args, **kwargs +): # used to mark the frame for the debugger # Check if the function is a coroutine function if asyncio.iscoroutinefunction(func): - return await func(*args, **kwargs) + return await func(*args, **kwargs) # %% callback invoked %% else: # If the function is not a coroutine, call it directly - return func(*args, **kwargs) + return func(*args, **kwargs) # %% callback invoked %% -def _invoke_callback(func, *args, **kwargs): - return func(*args, **kwargs) +def _invoke_callback(func, *args, **kwargs): # used to mark the frame for the debugger + return func(*args, **kwargs) # %% callback invoked %% class NoUpdate: diff --git a/dash/dash.py b/dash/dash.py index 60a797c2f0..738edf42e8 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -150,6 +150,7 @@ def _get_traceback(secret, error: Exception): def _get_skip(error): from dash._callback import ( # pylint: disable=import-outside-toplevel _invoke_callback, + _async_invoke_callback, ) tb = error.__traceback__ @@ -157,7 +158,10 @@ def _get_skip(error): while tb.tb_next is not None: skip += 1 tb = tb.tb_next - if tb.tb_frame.f_code is _invoke_callback.__code__: + if tb.tb_frame.f_code in [ + _invoke_callback.__code__, + _async_invoke_callback.__code__, + ]: return skip return skip @@ -165,11 +169,15 @@ def _get_skip(error): def _do_skip(error): from dash._callback import ( # pylint: disable=import-outside-toplevel _invoke_callback, + _async_invoke_callback, ) tb = error.__traceback__ while tb.tb_next is not None: - if tb.tb_frame.f_code is _invoke_callback.__code__: + if tb.tb_frame.f_code in [ + _invoke_callback.__code__, + _async_invoke_callback.__code__, + ]: return tb.tb_next tb = tb.tb_next return error.__traceback__ diff --git a/tests/integration/devtools/test_devtools_error_handling.py b/tests/integration/devtools/test_devtools_error_handling.py index fa51cda9d3..3161b8d7ea 100644 --- a/tests/integration/devtools/test_devtools_error_handling.py +++ b/tests/integration/devtools/test_devtools_error_handling.py @@ -72,14 +72,14 @@ def test_dveh001_python_errors(dash_duo): assert "Special 2 clicks exception" in error0 assert "in bad_sub" not in error0 # dash and flask part of the traceback not included - assert "%% callback invoked %%" not in error0 + assert "dash.py" not in error0 assert "self.wsgi_app" not in error0 error1 = get_error_html(dash_duo, 1) assert "in update_output" in error1 assert "in bad_sub" in error1 assert "ZeroDivisionError" in error1 - assert "%% callback invoked %%" not in error1 + assert "dash.py" not in error1 assert "self.wsgi_app" not in error1 @@ -108,14 +108,14 @@ def 
test_dveh006_long_python_errors(dash_duo): assert "in bad_sub" not in error0 # dash and flask part of the traceback ARE included # since we set dev_tools_prune_errors=False - assert "%% callback invoked %%" in error0 + assert "dash.py" in error0 assert "self.wsgi_app" in error0 error1 = get_error_html(dash_duo, 1) assert "in update_output" in error1 assert "in bad_sub" in error1 assert "ZeroDivisionError" in error1 - assert "%% callback invoked %%" in error1 + assert "dash.py" in error1 assert "self.wsgi_app" in error1 From 0dd778e46001bed6d62a249913b4eefdbe9e02c7 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 11:18:57 -0500 Subject: [PATCH 07/38] fixing lint issues --- dash/_callback.py | 8 +++----- dash/dash.py | 7 +++---- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index f74b7928bb..80557550b9 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -3,8 +3,8 @@ from functools import wraps from typing import Callable, Optional, Any -import flask import asyncio +import flask from .dependencies import ( handle_callback_args, @@ -46,9 +46,8 @@ async def _async_invoke_callback( # Check if the function is a coroutine function if asyncio.iscoroutinefunction(func): return await func(*args, **kwargs) # %% callback invoked %% - else: - # If the function is not a coroutine, call it directly - return func(*args, **kwargs) # %% callback invoked %% + # If the function is not a coroutine, call it directly + return func(*args, **kwargs) # %% callback invoked %% def _invoke_callback(func, *args, **kwargs): # used to mark the frame for the debugger @@ -83,7 +82,6 @@ def callback( manager=None, cache_args_to_ignore=None, on_error: Optional[Callable[[Exception], Any]] = None, - use_async=False, **_kwargs, ): """ diff --git a/dash/dash.py b/dash/dash.py index 738edf42e8..877cd708d7 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -18,8 +18,8 @@ from urllib.parse import urlparse from typing import Any, Callable, Dict, Optional, Union, List -import flask import asyncio +import flask from importlib_metadata import version as _get_distribution_version @@ -205,9 +205,8 @@ async def execute_async_function(func, *args, **kwargs): # Check if the function is a coroutine function if asyncio.iscoroutinefunction(func): return await func(*args, **kwargs) - else: - # If the function is not a coroutine, call it directly - return func(*args, **kwargs) + # If the function is not a coroutine, call it directly + return func(*args, **kwargs) # pylint: disable=too-many-instance-attributes From 2d9a930189f3eb4ba88859998ac3ecdd64e97dfc Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 12:05:30 -0500 Subject: [PATCH 08/38] Adjustments for `use_async` and determining whether the app can be use as `async`. Added more defined error messages when libraries are missing or async was used without async turned on. 
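Note: the opt-in/detection logic lands in `Dash.__init__`; a simplified sketch of the pattern (not the exact code from the diff below, which performs these checks inline):

    def _resolve_use_async(use_async):
        """Auto-detect async support when the flag is not set explicitly."""
        if use_async is None:
            try:
                import asgiref  # noqa: F401 - presence check only
                return True
            except ImportError:
                return False
        if use_async:
            try:
                import asgiref  # noqa: F401
            except ImportError as exc:
                raise Exception(
                    "dash[async] requirements are missing; "
                    "install them with `pip install dash[async]`"
                ) from exc
        return use_async

With `use_async` left at its default, the app enables the async endpoints only when `asgiref` (pulled in by `flask[async]`) is importable; passing `use_async=True` without that dependency raises the more descriptive error added in this commit.
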
--- dash/dash.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/dash/dash.py b/dash/dash.py index 877cd708d7..2cb0a5d416 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -443,9 +443,25 @@ def __init__( # pylint: disable=too-many-statements routing_callback_inputs: Optional[Dict[str, Union[Input, State]]] = None, description: Optional[str] = None, on_error: Optional[Callable[[Exception], Any]] = None, - use_async: Optional[bool] = False, + use_async: Optional[bool] = None, **obsolete, ): + + if use_async is None: + try: + import asgiref + + use_async = True + except ImportError: + pass + elif use_async: + try: + import asgiref + except ImportError: + raise Exception( + "You are trying to use dash[async] without having installed the requirements please install via: `pip install dash[async]`" + ) + _validate.check_obsolete(obsolete) caller_name = None if name else get_caller_name() @@ -1539,6 +1555,11 @@ def dispatch(self): response_data = ctx.run(partial_func) + if asyncio.iscoroutine(response_data): + raise Exception( + "You are trying to use a coroutine without dash[async], please install the dependencies via `pip install dash[async]` and make sure you arent passing `use_async=False` to the app." + ) + response.set_data(response_data) return response From b2f9cd6f970b6d28f5aeedfdfdbd12fdc2c93a40 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 12:21:35 -0500 Subject: [PATCH 09/38] disable lint for unused import on `asgiref` --- dash/dash.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index 2cb0a5d416..e745e0582a 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -449,14 +449,14 @@ def __init__( # pylint: disable=too-many-statements if use_async is None: try: - import asgiref + import asgiref # pylint: disable=unused-import use_async = True except ImportError: pass elif use_async: try: - import asgiref + import asgiref # pylint: disable=unused-import except ImportError: raise Exception( "You are trying to use dash[async] without having installed the requirements please install via: `pip install dash[async]`" From f1ac667ec661c5fab3fb259c04256130c2b7e51e Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 12:35:25 -0500 Subject: [PATCH 10/38] adjustments for formatting --- dash/dash.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index e745e0582a..0f0415bc15 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -449,14 +449,14 @@ def __init__( # pylint: disable=too-many-statements if use_async is None: try: - import asgiref # pylint: disable=unused-import + import asgiref # pylint: disable=unused-import use_async = True except ImportError: pass elif use_async: try: - import asgiref # pylint: disable=unused-import + import asgiref # pylint: disable=unused-import except ImportError: raise Exception( "You are trying to use dash[async] without having installed the requirements please install via: `pip install dash[async]`" From 52de22e5adf4c562c01fc64a243ac29e3d3fb420 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:01:25 -0500 Subject: [PATCH 11/38] adding additional ignore for `flake8` --- dash/dash.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index dd21cc5300..2fec10621c 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -449,14 +449,14 @@ def 
__init__( # pylint: disable=too-many-statements if use_async is None: try: - import asgiref # pylint: disable=unused-import + import asgiref # pylint: disable=unused-import # noqa use_async = True except ImportError: pass elif use_async: try: - import asgiref # pylint: disable=unused-import + import asgiref # pylint: disable=unused-import # noqa except ImportError: raise Exception( "You are trying to use dash[async] without having installed the requirements please install via: `pip install dash[async]`" From 4189ed6f35dbf263f6bbe092d36579f6f5c13e5f Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:44:36 -0500 Subject: [PATCH 12/38] more adjustments for lint --- dash/dash.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index 2fec10621c..a942a6d3fb 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -449,18 +449,18 @@ def __init__( # pylint: disable=too-many-statements if use_async is None: try: - import asgiref # pylint: disable=unused-import # noqa + import asgiref # pylint: disable=unused-import, import-outside-toplevel # noqa use_async = True except ImportError: pass elif use_async: try: - import asgiref # pylint: disable=unused-import # noqa - except ImportError: + import asgiref # pylint: disable=unused-import, import-outside-toplevel # noqa + except ImportError as exc: raise Exception( "You are trying to use dash[async] without having installed the requirements please install via: `pip install dash[async]`" - ) + ) from exc _validate.check_obsolete(obsolete) From 0ddf2546994e32cb177fc616b654dd0478d0f820 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:07:05 -0500 Subject: [PATCH 13/38] adding `dash[async]` --- requirements/async.txt | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 requirements/async.txt diff --git a/requirements/async.txt b/requirements/async.txt new file mode 100644 index 0000000000..fafa8e7e6e --- /dev/null +++ b/requirements/async.txt @@ -0,0 +1 @@ +flask[async] diff --git a/setup.py b/setup.py index ea616e2a18..7ed781c20d 100644 --- a/setup.py +++ b/setup.py @@ -29,6 +29,7 @@ def read_req_file(req_type): install_requires=read_req_file("install"), python_requires=">=3.8", extras_require={ + "async": read_req_file("async"), "ci": read_req_file("ci"), "dev": read_req_file("dev"), "testing": read_req_file("testing"), From 337ec26cae907955c0f3f9e0c44b90a1c9520713 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 3 Dec 2024 21:52:32 -0500 Subject: [PATCH 14/38] attempt no 1 for refactoring dash for `dispatch` -- failing lint on purpose for test --- dash/dash.py | 281 ++++++++++++++------------------------------------- 1 file changed, 76 insertions(+), 205 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index a942a6d3fb..d64ca96edc 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -1309,259 +1309,130 @@ def long_callback( ) # pylint: disable=R0915 - async def async_dispatch(self): - body = flask.request.get_json() - + def _initialize_context(self, body): + """Initialize the global context for the request.""" g = AttributeDict({}) - - g.inputs_list = inputs = body.get( # pylint: disable=assigning-non-slot - "inputs", [] - ) - g.states_list = state = body.get( # pylint: disable=assigning-non-slot - "state", [] - ) - output = body["output"] - outputs_list = body.get("outputs") - g.outputs_list = outputs_list # pylint: disable=assigning-non-slot - 
- g.input_values = ( # pylint: disable=assigning-non-slot - input_values - ) = inputs_to_dict(inputs) - g.state_values = inputs_to_dict(state) # pylint: disable=assigning-non-slot - changed_props = body.get("changedPropIds", []) - g.triggered_inputs = [ # pylint: disable=assigning-non-slot - {"prop_id": x, "value": input_values.get(x)} for x in changed_props + g.inputs_list = body.get("inputs", []) + g.states_list = body.get("state", []) + g.outputs_list = body.get("outputs", []) + g.input_values = inputs_to_dict(g.inputs_list) + g.state_values = inputs_to_dict(g.states_list) + g.triggered_inputs = [ + {"prop_id": x, "value": g.input_values.get(x)} + for x in body.get("changedPropIds", []) ] - - response = ( - g.dash_response # pylint: disable=assigning-non-slot - ) = flask.Response(mimetype="application/json") - - args = inputs_to_vals(inputs + state) - + g.dash_response = flask.Response(mimetype="application/json") + g.cookies = dict(**flask.request.cookies) + g.headers = dict(**flask.request.headers) + g.path = flask.request.full_path + g.remote = flask.request.remote_addr + g.origin = flask.request.origin + g.updated_props = {} + return g + + def _prepare_callback(self, g, body): + """Prepare callback-related data.""" + output = body["output"] try: cb = self.callback_map[output] func = cb["callback"] - g.background_callback_manager = ( - cb.get("manager") or self._background_manager - ) + g.background_callback_manager = cb.get("manager") or self._background_manager g.ignore_register_page = cb.get("long", False) # Add args_grouping inputs_state_indices = cb["inputs_state_indices"] - inputs_state = inputs + state - inputs_state = convert_to_AttributeDict(inputs_state) + inputs_state = convert_to_AttributeDict(g.inputs_list + g.states_list) - if cb.get("no_output"): - outputs_list = [] - elif not outputs_list: - # FIXME Old renderer support? 
+ # Legacy support for older renderers + if not g.outputs_list: split_callback_id(output) - # update args_grouping attributes + # Update args_grouping attributes for s in inputs_state: # check for pattern matching: list of inputs or state if isinstance(s, list): for pattern_match_g in s: - update_args_group(pattern_match_g, changed_props) - update_args_group(s, changed_props) + update_args_group(pattern_match_g, body.get("changedPropIds", [])) + update_args_group(s, body.get("changedPropIds", [])) - args_grouping = map_grouping( - lambda ind: inputs_state[ind], inputs_state_indices + g.args_grouping, g.using_args_grouping = self._prepare_grouping( + inputs_state, inputs_state_indices ) - - g.args_grouping = args_grouping # pylint: disable=assigning-non-slot - g.using_args_grouping = ( # pylint: disable=assigning-non-slot - not isinstance(inputs_state_indices, int) - and ( - inputs_state_indices - != list(range(grouping_len(inputs_state_indices))) - ) + g.outputs_grouping, g.using_outputs_grouping = self._prepare_grouping( + g.outputs_list, cb.get("outputs_indices", []) ) + except KeyError as e: + raise KeyError(f"Callback function not found for output '{output}'.") from e + return func + + def _prepare_grouping(self, data_list, indices): + """Prepare grouping logic for inputs or outputs.""" + if not isinstance(data_list, list): + flat_data = [data_list] + else: + flat_data = data_list - # Add outputs_grouping - outputs_indices = cb["outputs_indices"] - if not isinstance(outputs_list, list): - flat_outputs = [outputs_list] - else: - flat_outputs = outputs_list - - if len(flat_outputs) > 0: - outputs_grouping = map_grouping( - lambda ind: flat_outputs[ind], outputs_indices - ) - g.outputs_grouping = ( - outputs_grouping # pylint: disable=assigning-non-slot - ) - g.using_outputs_grouping = ( # pylint: disable=assigning-non-slot - not isinstance(outputs_indices, int) - and outputs_indices != list(range(grouping_len(outputs_indices))) - ) - else: - g.outputs_grouping = [] - g.using_outputs_grouping = [] - g.updated_props = {} - - g.cookies = dict(**flask.request.cookies) - g.headers = dict(**flask.request.headers) - g.path = flask.request.full_path - g.remote = flask.request.remote_addr - g.origin = flask.request.origin + if len(flat_data) > 0: + grouping = map_grouping(lambda ind: flat_data[ind], indices) + using_grouping = not isinstance(indices, int) and indices != list(range(grouping_len(indices))) + else: + grouping, using_grouping = [], False - except KeyError as missing_callback_function: - msg = f"Callback function not found for output '{output}', perhaps you forgot to prepend the '@'?" 
- raise KeyError(msg) from missing_callback_function + return grouping, using_grouping - ctx = copy_context() - # Create a partial function with the necessary arguments + def _execute_callback(self, func, args, outputs_list, g): + """Execute the callback with the prepared arguments.""" # noinspection PyArgumentList partial_func = functools.partial( - execute_async_function, func, *args, outputs_list=outputs_list, - long_callback_manager=self._background_manager, + long_callback_manager=g.background_callback_manager, callback_context=g, app=self, app_on_error=self._on_error, app_use_async=self._use_async, ) + return partial_func + + async def async_dispatch(self): + body = flask.request.get_json() + g = self._initialize_context(body) + func = self._prepare_callback(g, body) + args = inputs_to_vals(g.inputs_list + g.states_list) - response_data = await ctx.run(partial_func) + ctx = copy_context() + partial_func = self._execute_callback(func, args, g.outputs_list, g) + if asyncio.iscoroutine(func): + response_data = await ctx.run(partial_func) + else: + response_data = ctx.run(partial_func) - # Check if the response is a coroutine if asyncio.iscoroutine(response_data): response_data = await response_data - response.set_data(response_data) - return response + g.dash_response.set_data(response_data) + return g.dash_response def dispatch(self): body = flask.request.get_json() - - g = AttributeDict({}) - - g.inputs_list = inputs = body.get( # pylint: disable=assigning-non-slot - "inputs", [] - ) - g.states_list = state = body.get( # pylint: disable=assigning-non-slot - "state", [] - ) - output = body["output"] - outputs_list = body.get("outputs") - g.outputs_list = outputs_list # pylint: disable=assigning-non-slot - - g.input_values = ( # pylint: disable=assigning-non-slot - input_values - ) = inputs_to_dict(inputs) - g.state_values = inputs_to_dict(state) # pylint: disable=assigning-non-slot - changed_props = body.get("changedPropIds", []) - g.triggered_inputs = [ # pylint: disable=assigning-non-slot - {"prop_id": x, "value": input_values.get(x)} for x in changed_props - ] - - response = ( - g.dash_response # pylint: disable=assigning-non-slot - ) = flask.Response(mimetype="application/json") - - args = inputs_to_vals(inputs + state) - - try: - cb = self.callback_map[output] - func = cb["callback"] - g.background_callback_manager = ( - cb.get("manager") or self._background_manager - ) - g.ignore_register_page = cb.get("long", False) - - # Add args_grouping - inputs_state_indices = cb["inputs_state_indices"] - inputs_state = inputs + state - inputs_state = convert_to_AttributeDict(inputs_state) - - if cb.get("no_output"): - outputs_list = [] - elif not outputs_list: - # FIXME Old renderer support? 
- split_callback_id(output) - - # update args_grouping attributes - for s in inputs_state: - # check for pattern matching: list of inputs or state - if isinstance(s, list): - for pattern_match_g in s: - update_args_group(pattern_match_g, changed_props) - update_args_group(s, changed_props) - - args_grouping = map_grouping( - lambda ind: inputs_state[ind], inputs_state_indices - ) - - g.args_grouping = args_grouping # pylint: disable=assigning-non-slot - g.using_args_grouping = ( # pylint: disable=assigning-non-slot - not isinstance(inputs_state_indices, int) - and ( - inputs_state_indices - != list(range(grouping_len(inputs_state_indices))) - ) - ) - - # Add outputs_grouping - outputs_indices = cb["outputs_indices"] - if not isinstance(outputs_list, list): - flat_outputs = [outputs_list] - else: - flat_outputs = outputs_list - - if len(flat_outputs) > 0: - outputs_grouping = map_grouping( - lambda ind: flat_outputs[ind], outputs_indices - ) - g.outputs_grouping = ( - outputs_grouping # pylint: disable=assigning-non-slot - ) - g.using_outputs_grouping = ( # pylint: disable=assigning-non-slot - not isinstance(outputs_indices, int) - and outputs_indices != list(range(grouping_len(outputs_indices))) - ) - else: - g.outputs_grouping = [] - g.using_outputs_grouping = [] - g.updated_props = {} - - g.cookies = dict(**flask.request.cookies) - g.headers = dict(**flask.request.headers) - g.path = flask.request.full_path - g.remote = flask.request.remote_addr - g.origin = flask.request.origin - - except KeyError as missing_callback_function: - msg = f"Callback function not found for output '{output}', perhaps you forgot to prepend the '@'?" - raise KeyError(msg) from missing_callback_function + g = self._initialize_context(body) + func = self._prepare_callback(g, body) + args = inputs_to_vals(g.inputs_list + g.states_list) ctx = copy_context() - # Create a partial function with the necessary arguments - # noinspection PyArgumentList - partial_func = functools.partial( - func, - *args, - outputs_list=outputs_list, - long_callback_manager=self._background_manager, - callback_context=g, - app=self, - app_on_error=self._on_error, - app_use_async=self._use_async, - ) - + partial_func = self._execute_callback(func, args, g.outputs_list, g) response_data = ctx.run(partial_func) if asyncio.iscoroutine(response_data): raise Exception( - "You are trying to use a coroutine without dash[async], please install the dependencies via `pip install dash[async]` and make sure you arent passing `use_async=False` to the app." + "You are trying to use a coroutine without dash[async]. " + "Please install the dependencies via `pip install dash[async]` and ensure " + "that `use_async=False` is not being passed to the app." 
) - response.set_data(response_data) - return response + g.dash_response.set_data(response_data) + return g.dash_response def _setup_server(self): if self._got_first_request["setup_server"]: From a730b4b8838a021fc16d600bb7b7359a22b01257 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 3 Dec 2024 22:07:51 -0500 Subject: [PATCH 15/38] fixing for lint --- dash/dash.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index d64ca96edc..d117310c4f 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -1336,7 +1336,9 @@ def _prepare_callback(self, g, body): try: cb = self.callback_map[output] func = cb["callback"] - g.background_callback_manager = cb.get("manager") or self._background_manager + g.background_callback_manager = ( + cb.get("manager") or self._background_manager + ) g.ignore_register_page = cb.get("long", False) # Add args_grouping @@ -1352,7 +1354,9 @@ def _prepare_callback(self, g, body): # check for pattern matching: list of inputs or state if isinstance(s, list): for pattern_match_g in s: - update_args_group(pattern_match_g, body.get("changedPropIds", [])) + update_args_group( + pattern_match_g, body.get("changedPropIds", []) + ) update_args_group(s, body.get("changedPropIds", [])) g.args_grouping, g.using_args_grouping = self._prepare_grouping( @@ -1374,7 +1378,9 @@ def _prepare_grouping(self, data_list, indices): if len(flat_data) > 0: grouping = map_grouping(lambda ind: flat_data[ind], indices) - using_grouping = not isinstance(indices, int) and indices != list(range(grouping_len(indices))) + using_grouping = not isinstance(indices, int) and indices != list( + range(grouping_len(indices)) + ) else: grouping, using_grouping = [], False From 538515da2240920683b44628940df86967ed1d9b Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Wed, 4 Dec 2024 08:43:12 -0500 Subject: [PATCH 16/38] fixing no outputs --- dash/dash.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dash/dash.py b/dash/dash.py index d117310c4f..4ccab905f9 100644 --- a/dash/dash.py +++ b/dash/dash.py @@ -1345,8 +1345,10 @@ def _prepare_callback(self, g, body): inputs_state_indices = cb["inputs_state_indices"] inputs_state = convert_to_AttributeDict(g.inputs_list + g.states_list) - # Legacy support for older renderers - if not g.outputs_list: + if cb.get("no_output"): + g.outputs_list = [] + elif not g.outputs_list: + # Legacy support for older renderers split_callback_id(output) # Update args_grouping attributes From 4f14a1ad526f0f1d70c3e8d0542776ec7e2e40b9 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Wed, 4 Dec 2024 16:00:35 -0500 Subject: [PATCH 17/38] attempt no 1 for refactoring callbacks --- dash/_callback.py | 661 ++++++++++++++++++---------------------------- 1 file changed, 251 insertions(+), 410 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 80557550b9..6f601d0ac9 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -353,456 +353,297 @@ def register_callback( no_output=not has_output, ) - # pylint: disable=too-many-locals - def wrap_func(func): - - if long is not None: - long_key = BaseLongCallbackManager.register_func( - func, - long.get("progress") is not None, - callback_id, - ) - - @wraps(func) - async def async_add_context(*args, **kwargs): - output_spec = kwargs.pop("outputs_list") - app_callback_manager = kwargs.pop("long_callback_manager", None) - - callback_ctx = kwargs.pop( - 
"callback_context", AttributeDict({"updated_props": {}}) - ) - app = kwargs.pop("app", None) - callback_manager = long and long.get("manager", app_callback_manager) - error_handler = on_error or kwargs.pop("app_on_error", None) - original_packages = set(ComponentRegistry.registry) - - if has_output: - _validate.validate_output_spec(insert_output, output_spec, Output) + def initialize_context(args, kwargs, inputs_state_indices): + """Initialize context and validate output specifications.""" + app = kwargs.pop("app", None) + output_spec = kwargs.pop("outputs_list", []) + callback_ctx = kwargs.pop( + "callback_context", AttributeDict({"updated_props": {}}) + ) + context_value.set(callback_ctx) + has_output = False + if len(output_spec) > 0: + has_output = True - context_value.set(callback_ctx) + if has_output: + _validate.validate_output_spec(insert_output, output_spec, Output) - func_args, func_kwargs = _validate.validate_and_group_input_args( - args, inputs_state_indices + func_args, func_kwargs = _validate.validate_and_group_input_args( + args, inputs_state_indices + ) + return output_spec, callback_ctx, func_args, func_kwargs, app + + def handle_long_callback( + kwargs, long, long_key, response, error_handler, func, func_args, func_kwargs + ): + """Set up the long callback and manage jobs.""" + callback_manager = long.get( + "manager", kwargs.pop("long_callback_manager", None) + ) + if not callback_manager: + raise MissingLongCallbackManagerError( + "Running `long` callbacks requires a manager to be installed.\n" + "Available managers:\n" + "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a separate Process" + " and store results on the local filesystem.\n" + "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" + " and store results on redis.\n" ) - response: dict = {"multi": True} - has_update = False - - if long is not None: - if not callback_manager: - raise MissingLongCallbackManagerError( - "Running `long` callbacks requires a manager to be installed.\n" - "Available managers:\n" - "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a separate Process" - " and store results on the local filesystem.\n" - "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" - " and store results on redis.\n" - ) + progress_outputs = long.get("progress") + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") + old_job = flask.request.args.getlist("oldJob") - progress_outputs = long.get("progress") - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") - old_job = flask.request.args.getlist("oldJob") + current_key = callback_manager.build_cache_key( + func, + # Inputs provided as dict is kwargs. + func_args if func_args else func_kwargs, + long.get("cache_args_to_ignore", []), + ) - current_key = callback_manager.build_cache_key( - func, - # Inputs provided as dict is kwargs. 
- func_args if func_args else func_kwargs, - long.get("cache_args_to_ignore", []), - ) + if old_job: + for job in old_job: + callback_manager.terminate_job(job) - if old_job: - for job in old_job: - callback_manager.terminate_job(job) + if not cache_key: + cache_key = current_key - if not cache_key: - cache_key = current_key + job_fn = callback_manager.func_registry.get(long_key) - job_fn = callback_manager.func_registry.get(long_key) + ctx_value = AttributeDict(**context_value.get()) + ctx_value.ignore_register_page = True + ctx_value.pop("background_callback_manager") + ctx_value.pop("dash_response") - ctx_value = AttributeDict(**context_value.get()) - ctx_value.ignore_register_page = True - ctx_value.pop("background_callback_manager") - ctx_value.pop("dash_response") + job = callback_manager.call_job_fn( + cache_key, + job_fn, + func_args if func_args else func_kwargs, + ctx_value, + ) - job = callback_manager.call_job_fn( - cache_key, - job_fn, - func_args if func_args else func_kwargs, - ctx_value, - ) + data = { + "cacheKey": cache_key, + "job": job, + } + + cancel = long.get("cancel") + if cancel: + data["cancel"] = cancel + + progress_default = long.get("progressDefault") + if progress_default: + data["progressDefault"] = { + str(o): x for o, x in zip(progress_outputs, progress_default) + } + return to_json(data), True + if progress_outputs: + # Get the progress before the result as it would be erased after the results. + progress = callback_manager.get_progress(cache_key) + if progress: + response["progress"] = { + str(x): progress[i] for i, x in enumerate(progress_outputs) + } + + output_value = callback_manager.get_result(cache_key, job_id) + # Must get job_running after get_result since get_results terminates it. + job_running = callback_manager.job_running(job_id) + if not job_running and output_value is callback_manager.UNDEFINED: + # Job canceled -> no output to close the loop. + output_value = NoUpdate() + + elif isinstance(output_value, dict) and "long_callback_error" in output_value: + error = output_value.get("long_callback_error", {}) + exc = LongCallbackError( + f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" + ) + if error_handler: + output_value = error_handler(exc) - data = { - "cacheKey": cache_key, - "job": job, - } - - cancel = long.get("cancel") - if cancel: - data["cancel"] = cancel - - progress_default = long.get("progressDefault") - if progress_default: - data["progressDefault"] = { - str(o): x - for o, x in zip(progress_outputs, progress_default) - } - return to_json(data) - if progress_outputs: - # Get the progress before the result as it would be erased after the results. - progress = callback_manager.get_progress(cache_key) - if progress: - response["progress"] = { - str(x): progress[i] for i, x in enumerate(progress_outputs) - } - - output_value = callback_manager.get_result(cache_key, job_id) - # Must get job_running after get_result since get_results terminates it. - job_running = callback_manager.job_running(job_id) - if not job_running and output_value is callback_manager.UNDEFINED: - # Job canceled -> no output to close the loop. 
+ if output_value is None: output_value = NoUpdate() - - elif ( - isinstance(output_value, dict) - and "long_callback_error" in output_value - ): - error = output_value.get("long_callback_error", {}) - exc = LongCallbackError( - f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" - ) - if error_handler: - output_value = error_handler(exc) - - if output_value is None: - output_value = NoUpdate() - # set_props from the error handler uses the original ctx - # instead of manager.get_updated_props since it runs in the - # request process. - has_update = ( - _set_side_update(callback_ctx, response) - or output_value is not None - ) - else: - raise exc - - if job_running and output_value is not callback_manager.UNDEFINED: - # cached results. - callback_manager.terminate_job(job_id) - - if multi and isinstance(output_value, (list, tuple)): - output_value = [ - NoUpdate() if NoUpdate.is_no_update(r) else r - for r in output_value - ] - updated_props = callback_manager.get_updated_props(cache_key) - if len(updated_props) > 0: - response["sideUpdate"] = updated_props - has_update = True - - if output_value is callback_manager.UNDEFINED: - return to_json(response) else: - try: - output_value = await _async_invoke_callback( - func, *func_args, **func_kwargs - ) - except PreventUpdate as err: - raise err - except Exception as err: # pylint: disable=broad-exception-caught - if error_handler: - output_value = error_handler(err) - - # If the error returns nothing, automatically puts NoUpdate for response. - if output_value is None and has_output: - output_value = NoUpdate() - else: - raise err - - component_ids = collections.defaultdict(dict) - - if has_output: - if not multi: - output_value, output_spec = [output_value], [output_spec] - flat_output_values = output_value - else: - if isinstance(output_value, (list, tuple)): - # For multi-output, allow top-level collection to be - # list or tuple - output_value = list(output_value) - - if NoUpdate.is_no_update(output_value): - flat_output_values = [output_value] - else: - # Flatten grouping and validate grouping structure - flat_output_values = flatten_grouping(output_value, output) - - if not NoUpdate.is_no_update(output_value): - _validate.validate_multi_return( - output_spec, flat_output_values, callback_id - ) - - for val, spec in zip(flat_output_values, output_spec): - if NoUpdate.is_no_update(val): - continue - for vali, speci in ( - zip(val, spec) if isinstance(spec, list) else [[val, spec]] - ): - if not NoUpdate.is_no_update(vali): - has_update = True - id_str = stringify_id(speci["id"]) - prop = clean_property_name(speci["property"]) - component_ids[id_str][prop] = vali + raise exc + + if job_running and output_value is not callback_manager.UNDEFINED: + # cached results. 
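# Illustrative aside, not part of this patch: the callback_manager polled above is
# normally one of the two documented background-callback managers named in the
# error message. A hedged construction sketch; the cache path and broker URLs are
# placeholders.
import diskcache
from dash import Dash, DiskcacheManager

# Local development: jobs run in a separate process, results stored on disk.
manager = DiskcacheManager(diskcache.Cache("./cache"))
demo = Dash(__name__, background_callback_manager=manager)

# Production alternative: a Celery worker with a redis backend, e.g.
#   from celery import Celery
#   from dash import CeleryManager
#   celery_app = Celery(__name__, broker="redis://...", backend="redis://...")
#   manager = CeleryManager(celery_app)
# (end of aside)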
+ callback_manager.terminate_job(job_id) + + if multi and isinstance(output_value, (list, tuple)): + output_value = [ + NoUpdate() if NoUpdate.is_no_update(r) else r for r in output_value + ] + updated_props = callback_manager.get_updated_props(cache_key) + if len(updated_props) > 0: + response["sideUpdate"] = updated_props + + if output_value is callback_manager.UNDEFINED: + return to_json(response), True + return output_value, False + + def prepare_response(output_value, output_spec, multi, response, callback_ctx, app): + """Prepare the response object based on the callback output.""" + component_ids = collections.defaultdict(dict) + original_packages = set(ComponentRegistry.registry) + + if output_spec: + if not multi: + output_value, output_spec = [output_value], [output_spec] + flat_output_values = output_value else: - if output_value is not None: - raise InvalidCallbackReturnValue( - f"No-output callback received return value: {output_value}" - ) - output_value = [] - flat_output_values = [] + if isinstance(output_value, (list, tuple)): + output_value = list(output_value) + flat_output_values = flatten_grouping(output_value, output_spec) + + for val, spec in zip(flat_output_values, output_spec): + if NoUpdate.is_no_update(val): + continue + for vali, speci in ( + zip(val, spec) if isinstance(spec, list) else [[val, spec]] + ): + if not NoUpdate.is_no_update(vali): + id_str = stringify_id(speci["id"]) + prop = clean_property_name(speci["property"]) + component_ids[id_str][prop] = vali - if not long: - has_update = _set_side_update(callback_ctx, response) or has_update + else: + if output_value is not None: + raise InvalidCallbackReturnValue( + f"No-output callback received return value: {output_value}" + ) - if not has_update: - raise PreventUpdate + has_update = ( + _set_side_update(callback_ctx, response) + or has_output + or response["sideUpdate"] + ) - response["response"] = component_ids + if not has_update: + raise PreventUpdate - if len(ComponentRegistry.registry) != len(original_packages): - diff_packages = list( - set(ComponentRegistry.registry).difference(original_packages) + if len(ComponentRegistry.registry) != len(original_packages): + diff_packages = list( + set(ComponentRegistry.registry).difference(original_packages) + ) + if not allow_dynamic_callbacks: + raise ImportedInsideCallbackError( + f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" + "You can set `_allow_dynamic_callbacks` to allow for development purpose only." ) - if not allow_dynamic_callbacks: - raise ImportedInsideCallbackError( - f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" - "You can set `_allow_dynamic_callbacks` to allow for development purpose only." 
- ) - dist = app.get_dist(diff_packages) - response["dist"] = dist + dist = app.get_dist(diff_packages) + response["dist"] = dist + return response.update({"response": component_ids}) - try: - jsonResponse = to_json(response) - except TypeError: - _validate.fail_callback_output(output_value, output) + # pylint: disable=too-many-locals + def wrap_func(func): - return jsonResponse + if long is not None: + long_key = BaseLongCallbackManager.register_func( + func, + long.get("progress") is not None, + callback_id, + ) + @wraps(func) def add_context(*args, **kwargs): - output_spec = kwargs.pop("outputs_list") - app_callback_manager = kwargs.pop("long_callback_manager", None) - - callback_ctx = kwargs.pop( - "callback_context", AttributeDict({"updated_props": {}}) - ) - app = kwargs.pop("app", None) - callback_manager = long and long.get("manager", app_callback_manager) + """Handles synchronous callbacks with context management.""" error_handler = on_error or kwargs.pop("app_on_error", None) - original_packages = set(ComponentRegistry.registry) - if has_output: - _validate.validate_output_spec(insert_output, output_spec, Output) - - context_value.set(callback_ctx) - - func_args, func_kwargs = _validate.validate_and_group_input_args( - args, inputs_state_indices + output_spec, callback_ctx, func_args, func_kwargs, app = initialize_context( + args, kwargs, inputs_state_indices ) response: dict = {"multi": True} - has_update = False - - if long is not None: - if not callback_manager: - raise MissingLongCallbackManagerError( - "Running `long` callbacks requires a manager to be installed.\n" - "Available managers:\n" - "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a separate Process" - " and store results on the local filesystem.\n" - "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" - " and store results on redis.\n" - ) - - progress_outputs = long.get("progress") - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") - old_job = flask.request.args.getlist("oldJob") - - current_key = callback_manager.build_cache_key( - func, - # Inputs provided as dict is kwargs. - func_args if func_args else func_kwargs, - long.get("cache_args_to_ignore", []), - ) - if old_job: - for job in old_job: - callback_manager.terminate_job(job) - - if not cache_key: - cache_key = current_key - - job_fn = callback_manager.func_registry.get(long_key) - - ctx_value = AttributeDict(**context_value.get()) - ctx_value.ignore_register_page = True - ctx_value.pop("background_callback_manager") - ctx_value.pop("dash_response") - - job = callback_manager.call_job_fn( - cache_key, - job_fn, - func_args if func_args else func_kwargs, - ctx_value, - ) - - data = { - "cacheKey": cache_key, - "job": job, - } - - cancel = long.get("cancel") - if cancel: - data["cancel"] = cancel - - progress_default = long.get("progressDefault") - if progress_default: - data["progressDefault"] = { - str(o): x - for o, x in zip(progress_outputs, progress_default) - } - return to_json(data) - if progress_outputs: - # Get the progress before the result as it would be erased after the results. - progress = callback_manager.get_progress(cache_key) - if progress: - response["progress"] = { - str(x): progress[i] for i, x in enumerate(progress_outputs) - } - - output_value = callback_manager.get_result(cache_key, job_id) - # Must get job_running after get_result since get_results terminates it. 
- job_running = callback_manager.job_running(job_id) - if not job_running and output_value is callback_manager.UNDEFINED: - # Job canceled -> no output to close the loop. - output_value = NoUpdate() - - elif ( - isinstance(output_value, dict) - and "long_callback_error" in output_value - ): - error = output_value.get("long_callback_error", {}) - exc = LongCallbackError( - f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" + try: + if long is not None: + output_value, skip = handle_long_callback( + kwargs, + long, + long_key, + response, + error_handler, + func, + func_args, + func_kwargs, ) - if error_handler: - output_value = error_handler(exc) - - if output_value is None: - output_value = NoUpdate() - # set_props from the error handler uses the original ctx - # instead of manager.get_updated_props since it runs in the - # request process. - has_update = ( - _set_side_update(callback_ctx, response) - or output_value is not None - ) - else: - raise exc - - if job_running and output_value is not callback_manager.UNDEFINED: - # cached results. - callback_manager.terminate_job(job_id) - - if multi and isinstance(output_value, (list, tuple)): - output_value = [ - NoUpdate() if NoUpdate.is_no_update(r) else r - for r in output_value - ] - updated_props = callback_manager.get_updated_props(cache_key) - if len(updated_props) > 0: - response["sideUpdate"] = updated_props - has_update = True - - if output_value is callback_manager.UNDEFINED: - return to_json(response) - else: - try: + if skip: + return output_value + else: output_value = _invoke_callback(func, *func_args, **func_kwargs) - except PreventUpdate as err: - raise err - except Exception as err: # pylint: disable=broad-exception-caught - if error_handler: - output_value = error_handler(err) - - # If the error returns nothing, automatically puts NoUpdate for response. 
- if output_value is None and has_output: - output_value = NoUpdate() - else: - raise err - - component_ids = collections.defaultdict(dict) - - if has_output: - if not multi: - output_value, output_spec = [output_value], [output_spec] - flat_output_values = output_value + except PreventUpdate: + raise + except Exception as err: # pylint: disable=broad-exception-caught + if error_handler: + output_value = error_handler(err) + if output_value is None and output_spec: + output_value = NoUpdate() else: - if isinstance(output_value, (list, tuple)): - # For multi-output, allow top-level collection to be - # list or tuple - output_value = list(output_value) - - if NoUpdate.is_no_update(output_value): - flat_output_values = [output_value] - else: - # Flatten grouping and validate grouping structure - flat_output_values = flatten_grouping(output_value, output) - - if not NoUpdate.is_no_update(output_value): - _validate.validate_multi_return( - output_spec, flat_output_values, callback_id - ) + raise err - for val, spec in zip(flat_output_values, output_spec): - if NoUpdate.is_no_update(val): - continue - for vali, speci in ( - zip(val, spec) if isinstance(spec, list) else [[val, spec]] - ): - if not NoUpdate.is_no_update(vali): - has_update = True - id_str = stringify_id(speci["id"]) - prop = clean_property_name(speci["property"]) - component_ids[id_str][prop] = vali - else: - if output_value is not None: - raise InvalidCallbackReturnValue( - f"No-output callback received return value: {output_value}" - ) - output_value = [] - flat_output_values = [] + prepare_response( + output_value, + output_spec, + multi, + response, + callback_ctx, + app, + ) + try: + jsonResponse = to_json(response) + except TypeError: + _validate.fail_callback_output(output_value, output) - if not long: - has_update = _set_side_update(callback_ctx, response) or has_update + return jsonResponse - if not has_update: - raise PreventUpdate + @wraps(func) + async def async_add_context(*args, **kwargs): + """Handles async callbacks with context management.""" + error_handler = on_error or kwargs.pop("app_on_error", None) - response["response"] = component_ids + output_spec, callback_ctx, func_args, func_kwargs, app = initialize_context( + args, kwargs, inputs_state_indices + ) - if len(ComponentRegistry.registry) != len(original_packages): - diff_packages = list( - set(ComponentRegistry.registry).difference(original_packages) - ) - if not allow_dynamic_callbacks: - raise ImportedInsideCallbackError( - f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" - "You can set `_allow_dynamic_callbacks` to allow for development purpose only." 
+ response: dict = {"multi": True} + + try: + if long is not None: + output_value, skip = handle_long_callback( + kwargs, + long, + long_key, + response, + error_handler, + func, + func_args, + func_kwargs, + ) + if skip: + return output_value + else: + output_value = await _async_invoke_callback( + func, *func_args, **func_kwargs ) - dist = app.get_dist(diff_packages) - response["dist"] = dist + except PreventUpdate: + raise + except Exception as err: # pylint: disable=broad-exception-caught + if error_handler: + output_value = error_handler(err) + if output_value is None and output_spec: + output_value = NoUpdate() + else: + raise err + prepare_response( + output_value, + has_output, + multi, + response, + callback_ctx, + app, + ) try: jsonResponse = to_json(response) except TypeError: From 96df44e0aaf026cc4e97c7d52082a8fd787bf347 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 5 Dec 2024 11:01:21 -0500 Subject: [PATCH 18/38] fixing for multi outputs --- dash/_callback.py | 114 +++++++++++++++++++++++++++++++++++----------- 1 file changed, 87 insertions(+), 27 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 6f601d0ac9..08992f3d55 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -356,14 +356,12 @@ def register_callback( def initialize_context(args, kwargs, inputs_state_indices): """Initialize context and validate output specifications.""" app = kwargs.pop("app", None) - output_spec = kwargs.pop("outputs_list", []) + output_spec = kwargs.pop("outputs_list") callback_ctx = kwargs.pop( "callback_context", AttributeDict({"updated_props": {}}) ) context_value.set(callback_ctx) - has_output = False - if len(output_spec) > 0: - has_output = True + original_packages = set(ComponentRegistry.registry) if has_output: _validate.validate_output_spec(insert_output, output_spec, Output) @@ -371,10 +369,27 @@ def initialize_context(args, kwargs, inputs_state_indices): func_args, func_kwargs = _validate.validate_and_group_input_args( args, inputs_state_indices ) - return output_spec, callback_ctx, func_args, func_kwargs, app + return ( + output_spec, + callback_ctx, + func_args, + func_kwargs, + app, + original_packages, + False, + ) def handle_long_callback( - kwargs, long, long_key, response, error_handler, func, func_args, func_kwargs + kwargs, + long, + long_key, + callback_ctx, + response, + error_handler, + func, + func_args, + func_kwargs, + has_update=False, ): """Set up the long callback and manage jobs.""" callback_manager = long.get( @@ -437,7 +452,7 @@ def handle_long_callback( data["progressDefault"] = { str(o): x for o, x in zip(progress_outputs, progress_default) } - return to_json(data), True + return to_json(data), True, has_update if progress_outputs: # Get the progress before the result as it would be erased after the results. progress = callback_manager.get_progress(cache_key) @@ -463,6 +478,12 @@ def handle_long_callback( if output_value is None: output_value = NoUpdate() + # set_props from the error handler uses the original ctx + # instead of manager.get_updated_props since it runs in the + # request process. 
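# Illustrative aside, not part of this patch: the branch above is exercised when an
# error handler pushes side updates. A hedged sketch of such a handler; set_props
# and on_error are existing Dash features, while the ids and props are made up.
from dash import set_props

def handle_error(err):
    # Surface the failure somewhere visible instead of updating the declared outputs.
    set_props("error-banner", {"children": f"Callback failed: {err}"})
    return None  # None is converted to NoUpdate for the declared outputs

# Registered per callback (on_error=handle_error) or app-wide
# (Dash(__name__, on_error=handle_error)).
# (end of aside)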
+ has_update = ( + _set_side_update(callback_ctx, response) or output_value is not None + ) else: raise exc @@ -477,24 +498,45 @@ def handle_long_callback( updated_props = callback_manager.get_updated_props(cache_key) if len(updated_props) > 0: response["sideUpdate"] = updated_props + has_update = True if output_value is callback_manager.UNDEFINED: - return to_json(response), True - return output_value, False - - def prepare_response(output_value, output_spec, multi, response, callback_ctx, app): + return to_json(response), True, has_update + return output_value, False, has_update + + def prepare_response( + output_value, + output_spec, + multi, + response, + callback_ctx, + app, + original_packages, + long, + has_update, + ): """Prepare the response object based on the callback output.""" component_ids = collections.defaultdict(dict) - original_packages = set(ComponentRegistry.registry) - if output_spec: + if has_output: if not multi: output_value, output_spec = [output_value], [output_spec] flat_output_values = output_value else: if isinstance(output_value, (list, tuple)): + # For multi-output, allow top-level collection to be + # list or tuple output_value = list(output_value) - flat_output_values = flatten_grouping(output_value, output_spec) + if NoUpdate.is_no_update(output_value): + flat_output_values = [output_value] + else: + # Flatten grouping and validate grouping structure + flat_output_values = flatten_grouping(output_value, output) + + if not NoUpdate.is_no_update(output_value): + _validate.validate_multi_return( + output_spec, flat_output_values, callback_id + ) for val, spec in zip(flat_output_values, output_spec): if NoUpdate.is_no_update(val): @@ -503,6 +545,7 @@ def prepare_response(output_value, output_spec, multi, response, callback_ctx, a zip(val, spec) if isinstance(spec, list) else [[val, spec]] ): if not NoUpdate.is_no_update(vali): + has_update = True id_str = stringify_id(speci["id"]) prop = clean_property_name(speci["property"]) component_ids[id_str][prop] = vali @@ -513,11 +556,8 @@ def prepare_response(output_value, output_spec, multi, response, callback_ctx, a f"No-output callback received return value: {output_value}" ) - has_update = ( - _set_side_update(callback_ctx, response) - or has_output - or response["sideUpdate"] - ) + if not long: + has_update = _set_side_update(callback_ctx, response) or has_output if not has_update: raise PreventUpdate @@ -550,18 +590,25 @@ def add_context(*args, **kwargs): """Handles synchronous callbacks with context management.""" error_handler = on_error or kwargs.pop("app_on_error", None) - output_spec, callback_ctx, func_args, func_kwargs, app = initialize_context( - args, kwargs, inputs_state_indices - ) + ( + output_spec, + callback_ctx, + func_args, + func_kwargs, + app, + original_packages, + has_update, + ) = initialize_context(args, kwargs, inputs_state_indices) response: dict = {"multi": True} try: if long is not None: - output_value, skip = handle_long_callback( + output_value, skip, has_update = handle_long_callback( kwargs, long, long_key, + callback_ctx, response, error_handler, func, @@ -589,6 +636,9 @@ def add_context(*args, **kwargs): response, callback_ctx, app, + original_packages, + long, + has_update, ) try: jsonResponse = to_json(response) @@ -602,18 +652,25 @@ async def async_add_context(*args, **kwargs): """Handles async callbacks with context management.""" error_handler = on_error or kwargs.pop("app_on_error", None) - output_spec, callback_ctx, func_args, func_kwargs, app = initialize_context( - args, 
kwargs, inputs_state_indices - ) + ( + output_spec, + callback_ctx, + func_args, + func_kwargs, + app, + original_packages, + has_update, + ) = initialize_context(args, kwargs, inputs_state_indices) response: dict = {"multi": True} try: if long is not None: - output_value, skip = handle_long_callback( + output_value, skip, has_update = handle_long_callback( kwargs, long, long_key, + callback_ctx, response, error_handler, func, @@ -643,6 +700,9 @@ async def async_add_context(*args, **kwargs): response, callback_ctx, app, + original_packages, + long, + has_update, ) try: jsonResponse = to_json(response) From 91400a3886591d8828c3e5d9dc360d284f43dc80 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 5 Dec 2024 16:39:14 -0500 Subject: [PATCH 19/38] attempt no 1 refactoring background callbacks for async functions --- dash/_callback.py | 192 +++++++++++------- dash/long_callback/managers/celery_manager.py | 55 ++++- .../managers/diskcache_manager.py | 105 +++++++++- 3 files changed, 270 insertions(+), 82 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 08992f3d55..54a0e72579 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -379,21 +379,10 @@ def initialize_context(args, kwargs, inputs_state_indices): False, ) - def handle_long_callback( - kwargs, - long, - long_key, - callback_ctx, - response, - error_handler, - func, - func_args, - func_kwargs, - has_update=False, - ): + def get_callback_manager(kwargs): """Set up the long callback and manage jobs.""" callback_manager = long.get( - "manager", kwargs.pop("long_callback_manager", None) + "manager", kwargs.get("long_callback_manager", None) ) if not callback_manager: raise MissingLongCallbackManagerError( @@ -405,63 +394,120 @@ def handle_long_callback( " and store results on redis.\n" ) - progress_outputs = long.get("progress") - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") old_job = flask.request.args.getlist("oldJob") - current_key = callback_manager.build_cache_key( + if old_job: + for job in old_job: + callback_manager.terminate_job(job) + + return callback_manager + + def setup_long_callback( + kwargs, + long, + long_key, + func, + func_args, + func_kwargs, + ): + """Set up the long callback and manage jobs.""" + callback_manager = get_callback_manager(kwargs) + + progress_outputs = long.get("progress") + + cache_key = callback_manager.build_cache_key( func, # Inputs provided as dict is kwargs. 
func_args if func_args else func_kwargs, long.get("cache_args_to_ignore", []), ) - if old_job: - for job in old_job: - callback_manager.terminate_job(job) + job_fn = callback_manager.func_registry.get(long_key) - if not cache_key: - cache_key = current_key + ctx_value = AttributeDict(**context_value.get()) + ctx_value.ignore_register_page = True + ctx_value.pop("background_callback_manager") + ctx_value.pop("dash_response") - job_fn = callback_manager.func_registry.get(long_key) + job = callback_manager.call_job_fn( + cache_key, + job_fn, + func_args if func_args else func_kwargs, + ctx_value, + ) - ctx_value = AttributeDict(**context_value.get()) - ctx_value.ignore_register_page = True - ctx_value.pop("background_callback_manager") - ctx_value.pop("dash_response") + data = { + "cacheKey": cache_key, + "job": job, + } - job = callback_manager.call_job_fn( - cache_key, - job_fn, - func_args if func_args else func_kwargs, - ctx_value, - ) + cancel = long.get("cancel") + if cancel: + data["cancel"] = cancel - data = { - "cacheKey": cache_key, - "job": job, + progress_default = long.get("progressDefault") + if progress_default: + data["progressDefault"] = { + str(o): x for o, x in zip(progress_outputs, progress_default) } + return to_json(data) - cancel = long.get("cancel") - if cancel: - data["cancel"] = cancel + def progress_long_callback(response, callback_manager): + progress_outputs = long.get("progress") + cache_key = flask.request.args.get("cacheKey") - progress_default = long.get("progressDefault") - if progress_default: - data["progressDefault"] = { - str(o): x for o, x in zip(progress_outputs, progress_default) - } - return to_json(data), True, has_update if progress_outputs: # Get the progress before the result as it would be erased after the results. progress = callback_manager.get_progress(cache_key) if progress: - response["progress"] = { - str(x): progress[i] for i, x in enumerate(progress_outputs) - } + response.update( + { + "progress": { + str(x): progress[i] for i, x in enumerate(progress_outputs) + } + } + ) + + def update_long_callback(error_handler, callback_ctx, response, kwargs): + """Set up the long callback and manage jobs.""" + callback_manager = get_callback_manager(kwargs) + + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") output_value = callback_manager.get_result(cache_key, job_id) + + progress_long_callback(response, callback_manager) + + return handle_rest_long_callback( + output_value, callback_manager, response, error_handler, callback_ctx + ) + + async def async_update_long_callback(error_handler, callback_ctx, response, kwargs): + """Set up the long callback and manage jobs.""" + callback_manager = get_callback_manager(kwargs) + + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") + + output_value = await callback_manager.async_get_result(cache_key, job_id) + + progress_long_callback(response, callback_manager) + + return handle_rest_long_callback( + output_value, callback_manager, response, error_handler, callback_ctx + ) + + def handle_rest_long_callback( + output_value, + callback_manager, + response, + error_handler, + callback_ctx, + has_update=False, + ): + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") # Must get job_running after get_result since get_results terminates it. 
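# Illustrative aside, not part of this patch: context_value, read above to build the
# snapshot passed to the background job, is a contextvars.ContextVar, so each request
# sees only its own callback context. A generic, hedged sketch of that mechanism with
# made-up names:
from contextvars import ContextVar, copy_context

request_ctx = ContextVar("request_ctx", default={})

def snapshot():
    # Comparable in spirit to AttributeDict(**context_value.get()) above.
    return dict(request_ctx.get())

ctx = copy_context()
ctx.run(request_ctx.set, {"user": "alice"})
print(ctx.run(snapshot))   # {'user': 'alice'} inside the copied context
print(request_ctx.get())   # {} outside of it
# (end of aside)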
job_running = callback_manager.job_running(job_id) if not job_running and output_value is callback_manager.UNDEFINED: @@ -501,8 +547,8 @@ def handle_long_callback( has_update = True if output_value is callback_manager.UNDEFINED: - return to_json(response), True, has_update - return output_value, False, has_update + return to_json(response), has_update, True + return output_value, has_update, False def prepare_response( output_value, @@ -577,7 +623,6 @@ def prepare_response( # pylint: disable=too-many-locals def wrap_func(func): - if long is not None: long_key = BaseLongCallbackManager.register_func( func, @@ -604,16 +649,18 @@ def add_context(*args, **kwargs): try: if long is not None: - output_value, skip, has_update = handle_long_callback( - kwargs, - long, - long_key, - callback_ctx, - response, - error_handler, - func, - func_args, - func_kwargs, + if not flask.request.args.get("cacheKey"): + return setup_long_callback( + kwargs, + long, + long_key, + func, + func_args, + func_kwargs, + ) + + output_value, has_update, skip = update_long_callback( + error_handler, callback_ctx, response, kwargs ) if skip: return output_value @@ -666,16 +713,17 @@ async def async_add_context(*args, **kwargs): try: if long is not None: - output_value, skip, has_update = handle_long_callback( - kwargs, - long, - long_key, - callback_ctx, - response, - error_handler, - func, - func_args, - func_kwargs, + if not flask.request.args.get("cacheKey"): + return setup_long_callback( + kwargs, + long, + long_key, + func, + func_args, + func_kwargs, + ) + output_value, has_update, skip = update_long_callback( + error_handler, callback_ctx, response, kwargs ) if skip: return output_value @@ -695,7 +743,7 @@ async def async_add_context(*args, **kwargs): prepare_response( output_value, - has_output, + output_spec, multi, response, callback_ctx, diff --git a/dash/long_callback/managers/celery_manager.py b/dash/long_callback/managers/celery_manager.py index 612cc245fb..3f8324ea58 100644 --- a/dash/long_callback/managers/celery_manager.py +++ b/dash/long_callback/managers/celery_manager.py @@ -9,6 +9,8 @@ from dash.exceptions import PreventUpdate from dash.long_callback._proxy_set_props import ProxySetProps from dash.long_callback.managers import BaseLongCallbackManager +import asyncio +from functools import partial class CeleryManager(BaseLongCallbackManager): @@ -90,7 +92,14 @@ def clear_cache_entry(self, key): self.handle.backend.delete(key) def call_job_fn(self, key, job_fn, args, context): - task = job_fn.delay(key, self._make_progress_key(key), args, context) + if asyncio.iscoroutinefunction(job_fn): + # pylint: disable-next=import-outside-toplevel,no-name-in-module,import-error + from asgiref.sync import async_to_sync + + new_job_fun = async_to_sync(job_fn) + task = new_job_fun.delay(key, self._make_progress_key(key), args, context) + else: + task = job_fn.delay(key, self._make_progress_key(key), args, context) return task.task_id def get_progress(self, key): @@ -197,7 +206,49 @@ def run(): result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder) ) - ctx.run(run) + async def async_run(): + c = AttributeDict(**context) + c.ignore_register_page = False + c.updated_props = ProxySetProps(_set_props) + context_value.set(c) + errored = False + try: + if isinstance(user_callback_args, dict): + user_callback_output = await fn( + *maybe_progress, **user_callback_args + ) + elif isinstance(user_callback_args, (list, tuple)): + user_callback_output = await fn( + *maybe_progress, *user_callback_args + ) + else: + 
user_callback_output = await fn(*maybe_progress, user_callback_args) + except PreventUpdate: + errored = True + cache.set(result_key, {"_dash_no_update": "_dash_no_update"}) + except Exception as err: # pylint: disable=broad-except + errored = True + cache.set( + result_key, + { + "long_callback_error": { + "msg": str(err), + "tb": traceback.format_exc(), + } + }, + ) + if asyncio.iscoroutine(user_callback_output): + user_callback_output = await user_callback_output + if not errored: + cache.set( + result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder) + ) + + if asyncio.iscoroutinefunction(fn): + func = partial(ctx.run, async_run) + asyncio.run(func()) + else: + ctx.run(run) return job_fn diff --git a/dash/long_callback/managers/diskcache_manager.py b/dash/long_callback/managers/diskcache_manager.py index e1a110f14f..cf23835e40 100644 --- a/dash/long_callback/managers/diskcache_manager.py +++ b/dash/long_callback/managers/diskcache_manager.py @@ -6,6 +6,8 @@ from ..._callback_context import context_value from ..._utils import AttributeDict from ...exceptions import PreventUpdate +import asyncio +from functools import partial _pending_value = "__$pending__" @@ -116,16 +118,63 @@ def clear_cache_entry(self, key): # noinspection PyUnresolvedReferences def call_job_fn(self, key, job_fn, args, context): + """ + Call the job function, supporting both sync and async jobs. + Args: + key: Cache key for the job. + job_fn: The job function to execute. + args: Arguments for the job function. + context: Context for the job. + Returns: + The PID of the spawned process or None for async execution. + """ # pylint: disable-next=import-outside-toplevel,no-name-in-module,import-error from multiprocess import Process - # pylint: disable-next=not-callable - proc = Process( - target=job_fn, - args=(key, self._make_progress_key(key), args, context), - ) - proc.start() - return proc.pid + # Check if the job is asynchronous + if asyncio.iscoroutinefunction(job_fn): + # For async jobs, run in an event loop in a new process + process = Process( + target=self._run_async_in_process, + args=(job_fn, key, args, context), + ) + process.start() + return process.pid + else: + # For sync jobs, use the existing implementation + # pylint: disable-next=not-callable + process = Process( + target=job_fn, + args=(key, self._make_progress_key(key), args, context), + ) + process.start() + return process.pid + + @staticmethod + def _run_async_in_process(job_fn, key, args, context): + """ + Helper function to run an async job in a new process. + Args: + job_fn: The async job function. + key: Cache key for the job. + args: Arguments for the job function. + context: Context for the job. 
+ """ + # Create a new event loop for the process + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + # Wrap the job function to include key and progress + async_job = partial(job_fn, key, args, context) + + try: + # Run the async job and wait for completion + loop.run_until_complete(async_job()) + except Exception as e: + # Handle errors, log them, and cache if necessary + raise str(e) + finally: + loop.close() def get_progress(self, key): progress_key = self._make_progress_key(key) @@ -214,7 +263,47 @@ def run(): if not errored: cache.set(result_key, user_callback_output) - ctx.run(run) + async def async_run(): + c = AttributeDict(**context) + c.ignore_register_page = False + c.updated_props = ProxySetProps(_set_props) + context_value.set(c) + errored = False + try: + if isinstance(user_callback_args, dict): + user_callback_output = await fn( + *maybe_progress, **user_callback_args + ) + elif isinstance(user_callback_args, (list, tuple)): + user_callback_output = await fn( + *maybe_progress, *user_callback_args + ) + else: + user_callback_output = await fn(*maybe_progress, user_callback_args) + except PreventUpdate: + errored = True + cache.set(result_key, {"_dash_no_update": "_dash_no_update"}) + except Exception as err: # pylint: disable=broad-except + errored = True + cache.set( + result_key, + { + "long_callback_error": { + "msg": str(err), + "tb": traceback.format_exc(), + } + }, + ) + if asyncio.iscoroutine(user_callback_output): + user_callback_output = await user_callback_output + if not errored: + cache.set(result_key, user_callback_output) + + if asyncio.iscoroutinefunction(fn): + func = partial(ctx.run, async_run) + asyncio.run(func()) + else: + ctx.run(run) return job_fn From 671cb2b1e1cef1bb3d8da0ffc79a8ee5c10883ab Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Thu, 5 Dec 2024 17:41:14 -0500 Subject: [PATCH 20/38] fixing for lint and progress outputs --- dash/_callback.py | 19 +------- dash/long_callback/managers/celery_manager.py | 43 +++++++++++-------- .../managers/diskcache_manager.py | 33 ++++++-------- 3 files changed, 38 insertions(+), 57 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 54a0e72579..d75cebd57c 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -300,7 +300,7 @@ def _set_side_update(ctx, response) -> bool: return False -# pylint: disable=too-many-branches,too-many-statements +# pylint: disable=too-many-branches,too-many-statements,too-many-locals def register_callback( callback_list, callback_map, config_prevent_initial_callbacks, *_args, **_kwargs ): @@ -475,24 +475,9 @@ def update_long_callback(error_handler, callback_ctx, response, kwargs): cache_key = flask.request.args.get("cacheKey") job_id = flask.request.args.get("job") - output_value = callback_manager.get_result(cache_key, job_id) - progress_long_callback(response, callback_manager) - return handle_rest_long_callback( - output_value, callback_manager, response, error_handler, callback_ctx - ) - - async def async_update_long_callback(error_handler, callback_ctx, response, kwargs): - """Set up the long callback and manage jobs.""" - callback_manager = get_callback_manager(kwargs) - - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") - - output_value = await callback_manager.async_get_result(cache_key, job_id) - - progress_long_callback(response, callback_manager) + output_value = callback_manager.get_result(cache_key, job_id) return handle_rest_long_callback( output_value, 
callback_manager, response, error_handler, callback_ctx diff --git a/dash/long_callback/managers/celery_manager.py b/dash/long_callback/managers/celery_manager.py index 3f8324ea58..3a41af7dcb 100644 --- a/dash/long_callback/managers/celery_manager.py +++ b/dash/long_callback/managers/celery_manager.py @@ -1,6 +1,8 @@ import json import traceback from contextvars import copy_context +import asyncio +from functools import partial from _plotly_utils.utils import PlotlyJSONEncoder @@ -9,8 +11,6 @@ from dash.exceptions import PreventUpdate from dash.long_callback._proxy_set_props import ProxySetProps from dash.long_callback.managers import BaseLongCallbackManager -import asyncio -from functools import partial class CeleryManager(BaseLongCallbackManager): @@ -92,14 +92,7 @@ def clear_cache_entry(self, key): self.handle.backend.delete(key) def call_job_fn(self, key, job_fn, args, context): - if asyncio.iscoroutinefunction(job_fn): - # pylint: disable-next=import-outside-toplevel,no-name-in-module,import-error - from asgiref.sync import async_to_sync - - new_job_fun = async_to_sync(job_fn) - task = new_job_fun.delay(key, self._make_progress_key(key), args, context) - else: - task = job_fn.delay(key, self._make_progress_key(key), args, context) + task = job_fn.delay(key, self._make_progress_key(key), args, context) return task.task_id def get_progress(self, key): @@ -144,11 +137,13 @@ def get_updated_props(self, key): return json.loads(updated_props) -def _make_job_fn(fn, celery_app, progress, key): +def _make_job_fn(fn, celery_app, progress, key): # pylint: disable=too-many-statements cache = celery_app.backend @celery_app.task(name=f"long_callback_{key}") - def job_fn(result_key, progress_key, user_callback_args, context=None): + def job_fn( + result_key, progress_key, user_callback_args, context=None + ): # pylint: disable=too-many-statements def _set_progress(progress_value): if not isinstance(progress_value, (list, tuple)): progress_value = [progress_value] @@ -224,21 +219,31 @@ async def async_run(): else: user_callback_output = await fn(*maybe_progress, user_callback_args) except PreventUpdate: + # Put NoUpdate dict directly to avoid circular imports. errored = True - cache.set(result_key, {"_dash_no_update": "_dash_no_update"}) + cache.set( + result_key, + json.dumps( + {"_dash_no_update": "_dash_no_update"}, cls=PlotlyJSONEncoder + ), + ) except Exception as err: # pylint: disable=broad-except errored = True cache.set( result_key, - { - "long_callback_error": { - "msg": str(err), - "tb": traceback.format_exc(), - } - }, + json.dumps( + { + "long_callback_error": { + "msg": str(err), + "tb": traceback.format_exc(), + } + }, + ), ) + if asyncio.iscoroutine(user_callback_output): user_callback_output = await user_callback_output + if not errored: cache.set( result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder) diff --git a/dash/long_callback/managers/diskcache_manager.py b/dash/long_callback/managers/diskcache_manager.py index cf23835e40..78c70c0d41 100644 --- a/dash/long_callback/managers/diskcache_manager.py +++ b/dash/long_callback/managers/diskcache_manager.py @@ -1,13 +1,13 @@ import traceback from contextvars import copy_context +import asyncio +from functools import partial from . 
import BaseLongCallbackManager from .._proxy_set_props import ProxySetProps from ..._callback_context import context_value from ..._utils import AttributeDict from ...exceptions import PreventUpdate -import asyncio -from functools import partial _pending_value = "__$pending__" @@ -131,24 +131,13 @@ def call_job_fn(self, key, job_fn, args, context): # pylint: disable-next=import-outside-toplevel,no-name-in-module,import-error from multiprocess import Process - # Check if the job is asynchronous - if asyncio.iscoroutinefunction(job_fn): - # For async jobs, run in an event loop in a new process - process = Process( - target=self._run_async_in_process, - args=(job_fn, key, args, context), - ) - process.start() - return process.pid - else: - # For sync jobs, use the existing implementation - # pylint: disable-next=not-callable - process = Process( - target=job_fn, - args=(key, self._make_progress_key(key), args, context), - ) - process.start() - return process.pid + # pylint: disable-next=not-callable + process = Process( + target=job_fn, + args=(key, self._make_progress_key(key), args, context), + ) + process.start() + return process.pid @staticmethod def _run_async_in_process(job_fn, key, args, context): @@ -172,7 +161,7 @@ def _run_async_in_process(job_fn, key, args, context): loop.run_until_complete(async_job()) except Exception as e: # Handle errors, log them, and cache if necessary - raise str(e) + raise Exception(str(e)) from e finally: loop.close() @@ -217,7 +206,9 @@ def get_updated_props(self, key): return result +# pylint: disable-next=too-many-statements def _make_job_fn(fn, cache, progress): + # pylint: disable-next=too-many-statements def job_fn(result_key, progress_key, user_callback_args, context): def _set_progress(progress_value): if not isinstance(progress_value, (list, tuple)): From f19fe231f1c6d534e57f7249f85416fa0a2cb6bc Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Fri, 6 Dec 2024 11:56:27 -0500 Subject: [PATCH 21/38] lint adjustments --- dash/_callback.py | 549 ++++++++++++++++++++++++---------------------- 1 file changed, 285 insertions(+), 264 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index d75cebd57c..d8e901365e 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -300,7 +300,269 @@ def _set_side_update(ctx, response) -> bool: return False -# pylint: disable=too-many-branches,too-many-statements,too-many-locals +def _initialize_context(args, kwargs, inputs_state_indices, has_output, insert_output): + """Initialize context and validate output specifications.""" + app = kwargs.pop("app", None) + output_spec = kwargs.pop("outputs_list") + callback_ctx = kwargs.pop("callback_context", AttributeDict({"updated_props": {}})) + context_value.set(callback_ctx) + original_packages = set(ComponentRegistry.registry) + + if has_output: + _validate.validate_output_spec(insert_output, output_spec, Output) + + func_args, func_kwargs = _validate.validate_and_group_input_args( + args, inputs_state_indices + ) + return ( + output_spec, + callback_ctx, + func_args, + func_kwargs, + app, + original_packages, + False, + ) + + +def _get_callback_manager(kwargs, long): + """Set up the long callback and manage jobs.""" + callback_manager = long.get("manager", kwargs.get("long_callback_manager", None)) + if not callback_manager: + raise MissingLongCallbackManagerError( + "Running `long` callbacks requires a manager to be installed.\n" + "Available managers:\n" + "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a 
separate Process" + " and store results on the local filesystem.\n" + "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" + " and store results on redis.\n" + ) + + old_job = flask.request.args.getlist("oldJob") + + if old_job: + for job in old_job: + callback_manager.terminate_job(job) + + return callback_manager + + +def _setup_long_callback( + kwargs, + long, + long_key, + func, + func_args, + func_kwargs, +): + """Set up the long callback and manage jobs.""" + callback_manager = _get_callback_manager(kwargs, long) + + progress_outputs = long.get("progress") + + cache_key = callback_manager.build_cache_key( + func, + # Inputs provided as dict is kwargs. + func_args if func_args else func_kwargs, + long.get("cache_args_to_ignore", []), + ) + + job_fn = callback_manager.func_registry.get(long_key) + + ctx_value = AttributeDict(**context_value.get()) + ctx_value.ignore_register_page = True + ctx_value.pop("background_callback_manager") + ctx_value.pop("dash_response") + + job = callback_manager.call_job_fn( + cache_key, + job_fn, + func_args if func_args else func_kwargs, + ctx_value, + ) + + data = { + "cacheKey": cache_key, + "job": job, + } + + cancel = long.get("cancel") + if cancel: + data["cancel"] = cancel + + progress_default = long.get("progressDefault") + if progress_default: + data["progressDefault"] = { + str(o): x for o, x in zip(progress_outputs, progress_default) + } + return to_json(data) + + +def _progress_long_callback(response, callback_manager, long): + progress_outputs = long.get("progress") + cache_key = flask.request.args.get("cacheKey") + + if progress_outputs: + # Get the progress before the result as it would be erased after the results. + progress = callback_manager.get_progress(cache_key) + if progress: + response.update( + { + "progress": { + str(x): progress[i] for i, x in enumerate(progress_outputs) + } + } + ) + + +def _update_long_callback(error_handler, callback_ctx, response, kwargs, long, multi): + """Set up the long callback and manage jobs.""" + callback_manager = _get_callback_manager(kwargs, long) + + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") + + _progress_long_callback(response, callback_manager, long) + + output_value = callback_manager.get_result(cache_key, job_id) + + return _handle_rest_long_callback( + output_value, callback_manager, response, error_handler, callback_ctx, multi + ) + + +def _handle_rest_long_callback( + output_value, + callback_manager, + response, + error_handler, + callback_ctx, + multi, + has_update=False, +): + cache_key = flask.request.args.get("cacheKey") + job_id = flask.request.args.get("job") + # Must get job_running after get_result since get_results terminates it. + job_running = callback_manager.job_running(job_id) + if not job_running and output_value is callback_manager.UNDEFINED: + # Job canceled -> no output to close the loop. + output_value = NoUpdate() + + elif isinstance(output_value, dict) and "long_callback_error" in output_value: + error = output_value.get("long_callback_error", {}) + exc = LongCallbackError( + f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" + ) + if error_handler: + output_value = error_handler(exc) + + if output_value is None: + output_value = NoUpdate() + # set_props from the error handler uses the original ctx + # instead of manager.get_updated_props since it runs in the + # request process. 
+ has_update = ( + _set_side_update(callback_ctx, response) or output_value is not None + ) + else: + raise exc + + if job_running and output_value is not callback_manager.UNDEFINED: + # cached results. + callback_manager.terminate_job(job_id) + + if multi and isinstance(output_value, (list, tuple)): + output_value = [ + NoUpdate() if NoUpdate.is_no_update(r) else r for r in output_value + ] + updated_props = callback_manager.get_updated_props(cache_key) + if len(updated_props) > 0: + response["sideUpdate"] = updated_props + has_update = True + + if output_value is callback_manager.UNDEFINED: + return to_json(response), has_update, True + return output_value, has_update, False + + +# pylint: disable=too-many-branches +def _prepare_response( + output_value, + output_spec, + multi, + response, + callback_ctx, + app, + original_packages, + long, + has_update, + has_output, + output, + callback_id, + allow_dynamic_callbacks, +): + """Prepare the response object based on the callback output.""" + component_ids = collections.defaultdict(dict) + + if has_output: + if not multi: + output_value, output_spec = [output_value], [output_spec] + flat_output_values = output_value + else: + if isinstance(output_value, (list, tuple)): + # For multi-output, allow top-level collection to be + # list or tuple + output_value = list(output_value) + if NoUpdate.is_no_update(output_value): + flat_output_values = [output_value] + else: + # Flatten grouping and validate grouping structure + flat_output_values = flatten_grouping(output_value, output) + + if not NoUpdate.is_no_update(output_value): + _validate.validate_multi_return( + output_spec, flat_output_values, callback_id + ) + + for val, spec in zip(flat_output_values, output_spec): + if NoUpdate.is_no_update(val): + continue + for vali, speci in ( + zip(val, spec) if isinstance(spec, list) else [[val, spec]] + ): + if not NoUpdate.is_no_update(vali): + has_update = True + id_str = stringify_id(speci["id"]) + prop = clean_property_name(speci["property"]) + component_ids[id_str][prop] = vali + + else: + if output_value is not None: + raise InvalidCallbackReturnValue( + f"No-output callback received return value: {output_value}" + ) + + if not long: + has_update = _set_side_update(callback_ctx, response) or has_output + + if not has_update: + raise PreventUpdate + + if len(ComponentRegistry.registry) != len(original_packages): + diff_packages = list( + set(ComponentRegistry.registry).difference(original_packages) + ) + if not allow_dynamic_callbacks: + raise ImportedInsideCallbackError( + f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" + "You can set `_allow_dynamic_callbacks` to allow for development purpose only." 
+ ) + dist = app.get_dist(diff_packages) + response["dist"] = dist + return response.update({"response": component_ids}) + + +# pylint: disable=too-many-branches,too-many-statements def register_callback( callback_list, callback_map, config_prevent_initial_callbacks, *_args, **_kwargs ): @@ -353,259 +615,6 @@ def register_callback( no_output=not has_output, ) - def initialize_context(args, kwargs, inputs_state_indices): - """Initialize context and validate output specifications.""" - app = kwargs.pop("app", None) - output_spec = kwargs.pop("outputs_list") - callback_ctx = kwargs.pop( - "callback_context", AttributeDict({"updated_props": {}}) - ) - context_value.set(callback_ctx) - original_packages = set(ComponentRegistry.registry) - - if has_output: - _validate.validate_output_spec(insert_output, output_spec, Output) - - func_args, func_kwargs = _validate.validate_and_group_input_args( - args, inputs_state_indices - ) - return ( - output_spec, - callback_ctx, - func_args, - func_kwargs, - app, - original_packages, - False, - ) - - def get_callback_manager(kwargs): - """Set up the long callback and manage jobs.""" - callback_manager = long.get( - "manager", kwargs.get("long_callback_manager", None) - ) - if not callback_manager: - raise MissingLongCallbackManagerError( - "Running `long` callbacks requires a manager to be installed.\n" - "Available managers:\n" - "- Diskcache (`pip install dash[diskcache]`) to run callbacks in a separate Process" - " and store results on the local filesystem.\n" - "- Celery (`pip install dash[celery]`) to run callbacks in a celery worker" - " and store results on redis.\n" - ) - - old_job = flask.request.args.getlist("oldJob") - - if old_job: - for job in old_job: - callback_manager.terminate_job(job) - - return callback_manager - - def setup_long_callback( - kwargs, - long, - long_key, - func, - func_args, - func_kwargs, - ): - """Set up the long callback and manage jobs.""" - callback_manager = get_callback_manager(kwargs) - - progress_outputs = long.get("progress") - - cache_key = callback_manager.build_cache_key( - func, - # Inputs provided as dict is kwargs. - func_args if func_args else func_kwargs, - long.get("cache_args_to_ignore", []), - ) - - job_fn = callback_manager.func_registry.get(long_key) - - ctx_value = AttributeDict(**context_value.get()) - ctx_value.ignore_register_page = True - ctx_value.pop("background_callback_manager") - ctx_value.pop("dash_response") - - job = callback_manager.call_job_fn( - cache_key, - job_fn, - func_args if func_args else func_kwargs, - ctx_value, - ) - - data = { - "cacheKey": cache_key, - "job": job, - } - - cancel = long.get("cancel") - if cancel: - data["cancel"] = cancel - - progress_default = long.get("progressDefault") - if progress_default: - data["progressDefault"] = { - str(o): x for o, x in zip(progress_outputs, progress_default) - } - return to_json(data) - - def progress_long_callback(response, callback_manager): - progress_outputs = long.get("progress") - cache_key = flask.request.args.get("cacheKey") - - if progress_outputs: - # Get the progress before the result as it would be erased after the results. 
- progress = callback_manager.get_progress(cache_key) - if progress: - response.update( - { - "progress": { - str(x): progress[i] for i, x in enumerate(progress_outputs) - } - } - ) - - def update_long_callback(error_handler, callback_ctx, response, kwargs): - """Set up the long callback and manage jobs.""" - callback_manager = get_callback_manager(kwargs) - - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") - - progress_long_callback(response, callback_manager) - - output_value = callback_manager.get_result(cache_key, job_id) - - return handle_rest_long_callback( - output_value, callback_manager, response, error_handler, callback_ctx - ) - - def handle_rest_long_callback( - output_value, - callback_manager, - response, - error_handler, - callback_ctx, - has_update=False, - ): - cache_key = flask.request.args.get("cacheKey") - job_id = flask.request.args.get("job") - # Must get job_running after get_result since get_results terminates it. - job_running = callback_manager.job_running(job_id) - if not job_running and output_value is callback_manager.UNDEFINED: - # Job canceled -> no output to close the loop. - output_value = NoUpdate() - - elif isinstance(output_value, dict) and "long_callback_error" in output_value: - error = output_value.get("long_callback_error", {}) - exc = LongCallbackError( - f"An error occurred inside a long callback: {error['msg']}\n{error['tb']}" - ) - if error_handler: - output_value = error_handler(exc) - - if output_value is None: - output_value = NoUpdate() - # set_props from the error handler uses the original ctx - # instead of manager.get_updated_props since it runs in the - # request process. - has_update = ( - _set_side_update(callback_ctx, response) or output_value is not None - ) - else: - raise exc - - if job_running and output_value is not callback_manager.UNDEFINED: - # cached results. 
- callback_manager.terminate_job(job_id) - - if multi and isinstance(output_value, (list, tuple)): - output_value = [ - NoUpdate() if NoUpdate.is_no_update(r) else r for r in output_value - ] - updated_props = callback_manager.get_updated_props(cache_key) - if len(updated_props) > 0: - response["sideUpdate"] = updated_props - has_update = True - - if output_value is callback_manager.UNDEFINED: - return to_json(response), has_update, True - return output_value, has_update, False - - def prepare_response( - output_value, - output_spec, - multi, - response, - callback_ctx, - app, - original_packages, - long, - has_update, - ): - """Prepare the response object based on the callback output.""" - component_ids = collections.defaultdict(dict) - - if has_output: - if not multi: - output_value, output_spec = [output_value], [output_spec] - flat_output_values = output_value - else: - if isinstance(output_value, (list, tuple)): - # For multi-output, allow top-level collection to be - # list or tuple - output_value = list(output_value) - if NoUpdate.is_no_update(output_value): - flat_output_values = [output_value] - else: - # Flatten grouping and validate grouping structure - flat_output_values = flatten_grouping(output_value, output) - - if not NoUpdate.is_no_update(output_value): - _validate.validate_multi_return( - output_spec, flat_output_values, callback_id - ) - - for val, spec in zip(flat_output_values, output_spec): - if NoUpdate.is_no_update(val): - continue - for vali, speci in ( - zip(val, spec) if isinstance(spec, list) else [[val, spec]] - ): - if not NoUpdate.is_no_update(vali): - has_update = True - id_str = stringify_id(speci["id"]) - prop = clean_property_name(speci["property"]) - component_ids[id_str][prop] = vali - - else: - if output_value is not None: - raise InvalidCallbackReturnValue( - f"No-output callback received return value: {output_value}" - ) - - if not long: - has_update = _set_side_update(callback_ctx, response) or has_output - - if not has_update: - raise PreventUpdate - - if len(ComponentRegistry.registry) != len(original_packages): - diff_packages = list( - set(ComponentRegistry.registry).difference(original_packages) - ) - if not allow_dynamic_callbacks: - raise ImportedInsideCallbackError( - f"Component librar{'y' if len(diff_packages) == 1 else 'ies'} was imported during callback.\n" - "You can set `_allow_dynamic_callbacks` to allow for development purpose only." 
- ) - dist = app.get_dist(diff_packages) - response["dist"] = dist - return response.update({"response": component_ids}) - # pylint: disable=too-many-locals def wrap_func(func): if long is not None: @@ -628,14 +637,16 @@ def add_context(*args, **kwargs): app, original_packages, has_update, - ) = initialize_context(args, kwargs, inputs_state_indices) + ) = _initialize_context( + args, kwargs, inputs_state_indices, has_output, insert_output + ) response: dict = {"multi": True} try: if long is not None: if not flask.request.args.get("cacheKey"): - return setup_long_callback( + return _setup_long_callback( kwargs, long, long_key, @@ -644,8 +655,8 @@ def add_context(*args, **kwargs): func_kwargs, ) - output_value, has_update, skip = update_long_callback( - error_handler, callback_ctx, response, kwargs + output_value, has_update, skip = _update_long_callback( + error_handler, callback_ctx, response, kwargs, long, multi ) if skip: return output_value @@ -661,7 +672,7 @@ def add_context(*args, **kwargs): else: raise err - prepare_response( + _prepare_response( output_value, output_spec, multi, @@ -671,6 +682,10 @@ def add_context(*args, **kwargs): original_packages, long, has_update, + has_output, + output, + callback_id, + allow_dynamic_callbacks, ) try: jsonResponse = to_json(response) @@ -692,14 +707,16 @@ async def async_add_context(*args, **kwargs): app, original_packages, has_update, - ) = initialize_context(args, kwargs, inputs_state_indices) + ) = _initialize_context( + args, kwargs, inputs_state_indices, has_output, insert_output + ) response: dict = {"multi": True} try: if long is not None: if not flask.request.args.get("cacheKey"): - return setup_long_callback( + return _setup_long_callback( kwargs, long, long_key, @@ -707,8 +724,8 @@ async def async_add_context(*args, **kwargs): func_args, func_kwargs, ) - output_value, has_update, skip = update_long_callback( - error_handler, callback_ctx, response, kwargs + output_value, has_update, skip = _update_long_callback( + error_handler, callback_ctx, response, kwargs, long, multi ) if skip: return output_value @@ -726,7 +743,7 @@ async def async_add_context(*args, **kwargs): else: raise err - prepare_response( + _prepare_response( output_value, output_spec, multi, @@ -736,6 +753,10 @@ async def async_add_context(*args, **kwargs): original_packages, long, has_update, + has_output, + output, + callback_id, + allow_dynamic_callbacks, ) try: jsonResponse = to_json(response) From 17c824d23bee115665cdf9d2379fa434e9995ad7 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Fri, 6 Dec 2024 17:41:45 -0500 Subject: [PATCH 22/38] adding async tests --- .circleci/config.yml | 54 + tests/integration/async/__init__.py | 0 .../integration/async/test_async_callbacks.py | 966 ++++++++++++++++++ tests/utils.py | 8 + 4 files changed, 1028 insertions(+) create mode 100644 tests/integration/async/__init__.py create mode 100644 tests/integration/async/test_async_callbacks.py create mode 100644 tests/utils.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 982c43b373..ac0b907c95 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -219,6 +219,57 @@ jobs: - store_artifacts: path: /tmp/dash_artifacts + test-312-async: &test + working_directory: ~/dash + docker: + - image: cimg/python:3.12.1-browsers + auth: + username: dashautomation + password: $DASH_PAT_DOCKERHUB + environment: + PERCY_ENABLE: 1 + PERCY_PARALLEL_TOTAL: -1 + PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: True + PYVERSION: python312 + REDIS_URL: redis://localhost:6379 + - image: 
cimg/redis:6.2.6 + auth: + username: dashautomation + password: $DASH_PAT_DOCKERHUB + parallelism: 3 + steps: + - checkout: + path: ~/dash + - run: sudo apt-get update + - run: echo $PYVERSION > ver.txt + - run: cat requirements/*.txt > requirements-all.txt + - restore_cache: + key: dep-{{ checksum ".circleci/config.yml" }}-{{ checksum "ver.txt" }}-{{ checksum "requirements-all.txt" }} + - browser-tools/install-browser-tools: + chrome-version: 120.0.6099.71 + install-firefox: false + install-geckodriver: false + - attach_workspace: + at: ~/dash + - run: + name: ️️🏗️ Install package + command: | + . venv/bin/activate + npm ci + pip install dash-package/dash-package.tar.gz[async,ci,dev,testing,celery,diskcache] --progress-bar off + pip list | grep dash + - run: + name: 🧪 Run Integration Tests + command: | + . venv/bin/activate + npm run citest.integration + - store_artifacts: + path: test-reports + - store_test_results: + path: test-reports + - store_artifacts: + path: /tmp/dash_artifacts + test-312-react-18: <<: *test docker: @@ -627,6 +678,9 @@ workflows: - test-312: requires: - install-dependencies-312 + - test-312-async: + requires: + - install-dependencies-312 - test-312-react-18: requires: - install-dependencies-312 diff --git a/tests/integration/async/__init__.py b/tests/integration/async/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/async/test_async_callbacks.py b/tests/integration/async/test_async_callbacks.py new file mode 100644 index 0000000000..fd47c7fa05 --- /dev/null +++ b/tests/integration/async/test_async_callbacks.py @@ -0,0 +1,966 @@ +import json +import os +from multiprocessing import Lock, Value +import pytest +import time + +import numpy as np +import werkzeug + +from dash_test_components import ( + AsyncComponent, + CollapseComponent, + DelayedEventComponent, + FragmentComponent, +) +from dash import ( + Dash, + Input, + Output, + State, + html, + dcc, + dash_table, + no_update, +) +from dash.exceptions import PreventUpdate +from tests.integration.utils import json_engine +from tests.utils import test_async + + +def test_async_cbsc001_simple_callback(dash_duo): + if not test_async(): + return + lock = Lock() + + app = Dash(__name__) + app.layout = html.Div( + [ + dcc.Input(id="input", value="initial value"), + html.Div(html.Div([1.5, None, "string", html.Div(id="output-1")])), + ] + ) + call_count = Value("i", 0) + + @app.callback(Output("output-1", "children"), [Input("input", "value")]) + async def update_output(value): + with lock: + call_count.value = call_count.value + 1 + return value + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#output-1", "initial value") + + input_ = dash_duo.find_element("#input") + dash_duo.clear_input(input_) + + for key in "hello world": + with lock: + input_.send_keys(key) + + dash_duo.wait_for_text_to_equal("#output-1", "hello world") + + assert call_count.value == 2 + len("hello world"), "initial count + each key stroke" + + assert not dash_duo.redux_state_is_loading + + assert dash_duo.get_logs() == [] + + +def test_async_cbsc002_callbacks_generating_children(dash_duo): + """Modify the DOM tree by adding new components in the callbacks.""" + if not test_async(): + return + # some components don't exist in the initial render + app = Dash(__name__, suppress_callback_exceptions=True) + app.layout = html.Div( + [dcc.Input(id="input", value="initial value"), html.Div(id="output")] + ) + + @app.callback(Output("output", "children"), [Input("input", "value")]) + async def 
pad_output(input): + return html.Div( + [ + dcc.Input(id="sub-input-1", value="sub input initial value"), + html.Div(id="sub-output-1"), + ] + ) + + call_count = Value("i", 0) + + @app.callback(Output("sub-output-1", "children"), [Input("sub-input-1", "value")]) + async def update_input(value): + call_count.value += 1 + return value + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#sub-output-1", "sub input initial value") + + assert call_count.value == 1, "called once at initial stage" + + pad_input, pad_div = dash_duo.dash_innerhtml_dom.select_one( + "#output > div" + ).contents + + assert ( + pad_input.attrs["value"] == "sub input initial value" + and pad_input.attrs["id"] == "sub-input-1" + ) + assert pad_input.name == "input" + + assert ( + pad_div.text == pad_input.attrs["value"] and pad_div.get("id") == "sub-output-1" + ), "the sub-output-1 content reflects to sub-input-1 value" + + paths = dash_duo.redux_state_paths + assert paths["objs"] == {} + assert paths["strs"] == { + "input": ["props", "children", 0], + "output": ["props", "children", 1], + "sub-input-1": [ + "props", + "children", + 1, + "props", + "children", + "props", + "children", + 0, + ], + "sub-output-1": [ + "props", + "children", + 1, + "props", + "children", + "props", + "children", + 1, + ], + }, "the paths should include these new output IDs" + + # editing the input should modify the sub output + dash_duo.find_element("#sub-input-1").send_keys("deadbeef") + + # the total updates is initial one + the text input changes + dash_duo.wait_for_text_to_equal( + "#sub-output-1", pad_input.attrs["value"] + "deadbeef" + ) + + assert not dash_duo.redux_state_is_loading, "loadingMap is empty" + + assert dash_duo.get_logs() == [], "console is clean" + + +def test_async_cbsc003_callback_with_unloaded_async_component(dash_duo): + if not test_async(): + return + app = Dash() + app.layout = html.Div( + children=[ + dcc.Tabs( + children=[ + dcc.Tab( + children=[ + html.Button(id="btn", children="Update Input"), + html.Div(id="output", children=["Hello"]), + ] + ), + dcc.Tab(children=dash_table.DataTable(id="other-table")), + ] + ) + ] + ) + + @app.callback(Output("output", "children"), [Input("btn", "n_clicks")]) + async def update_out(n_clicks): + if n_clicks is None: + raise PreventUpdate + + return "Bye" + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#output", "Hello") + dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#output", "Bye") + assert dash_duo.get_logs() == [] + + +def test_async_cbsc004_callback_using_unloaded_async_component(dash_duo): + if not test_async(): + return + app = Dash() + app.layout = html.Div( + [ + dcc.Tabs( + [ + dcc.Tab("boo!"), + dcc.Tab( + dash_table.DataTable( + id="table", + columns=[{"id": "a", "name": "A"}], + data=[{"a": "b"}], + ) + ), + ] + ), + html.Button("Update Input", id="btn"), + html.Div("Hello", id="output"), + html.Div(id="output2"), + ] + ) + + @app.callback( + Output("output", "children"), + [Input("btn", "n_clicks")], + [State("table", "data")], + ) + async def update_out(n_clicks, data): + return json.dumps(data) + " - " + str(n_clicks) + + @app.callback( + Output("output2", "children"), + [Input("btn", "n_clicks")], + [State("table", "derived_viewport_data")], + ) + async def update_out2(n_clicks, data): + return json.dumps(data) + " - " + str(n_clicks) + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#output", '[{"a": "b"}] - None') + dash_duo.wait_for_text_to_equal("#output2", "null - None") + 
+ dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#output", '[{"a": "b"}] - 1') + dash_duo.wait_for_text_to_equal("#output2", "null - 1") + + dash_duo.find_element(".tab:not(.tab--selected)").click() + dash_duo.wait_for_text_to_equal("#table th", "A") + # table props are in state so no change yet + dash_duo.wait_for_text_to_equal("#output2", "null - 1") + + # repeat a few times, since one of the failure modes I saw during dev was + # intermittent - but predictably so? + for i in range(2, 10): + expected = '[{"a": "b"}] - ' + str(i) + dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#output", expected) + # now derived props are available + dash_duo.wait_for_text_to_equal("#output2", expected) + + assert dash_duo.get_logs() == [] + + +@pytest.mark.parametrize("engine", ["json", "orjson"]) +def test_async_cbsc005_children_types(dash_duo, engine): + if not test_async(): + return + with json_engine(engine): + app = Dash() + app.layout = html.Div([html.Button(id="btn"), html.Div("init", id="out")]) + + outputs = [ + [None, ""], + ["a string", "a string"], + [123, "123"], + [123.45, "123.45"], + [[6, 7, 8], "678"], + [["a", "list", "of", "strings"], "alistofstrings"], + [["strings", 2, "numbers"], "strings2numbers"], + [["a string", html.Div("and a div")], "a string\nand a div"], + ] + + @app.callback(Output("out", "children"), [Input("btn", "n_clicks")]) + async def set_children(n): + if n is None or n > len(outputs): + return no_update + return outputs[n - 1][0] + + dash_duo.start_server(app) + dash_duo.wait_for_text_to_equal("#out", "init") + + for children, text in outputs: + dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#out", text) + + +@pytest.mark.parametrize("engine", ["json", "orjson"]) +def test_async_cbsc006_array_of_objects(dash_duo, engine): + if not test_async(): + return + with json_engine(engine): + app = Dash() + app.layout = html.Div( + [html.Button(id="btn"), dcc.Dropdown(id="dd"), html.Div(id="out")] + ) + + @app.callback(Output("dd", "options"), [Input("btn", "n_clicks")]) + async def set_options(n): + return [{"label": "opt{}".format(i), "value": i} for i in range(n or 0)] + + @app.callback(Output("out", "children"), [Input("dd", "options")]) + async def set_out(opts): + print(repr(opts)) + return len(opts) + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#out", "0") + for i in range(5): + dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#out", str(i + 1)) + dash_duo.select_dcc_dropdown("#dd", "opt{}".format(i)) + + +@pytest.mark.xfail( + condition=werkzeug.__version__ in ("2.1.0", "2.1.1"), + reason="Bug with 204 and Transfer-Encoding", + strict=False, +) +@pytest.mark.parametrize("refresh", [False, True]) +def test_async_cbsc007_parallel_updates(refresh, dash_duo): + # This is a funny case, that seems to mostly happen with dcc.Location + # but in principle could happen in other cases too: + # A callback chain (in this case the initial hydration) is set to update a + # value, but after that callback is queued and before it returns, that value + # is also set explicitly from the front end (in this case Location.pathname, + # which gets set in its componentDidMount during the render process, and + # callbacks are delayed until after rendering is finished because of the + # async table) + # At one point in the wildcard PR #1103, changing from requestQueue to + # pendingCallbacks, calling PreventUpdate in the callback would also skip + # any callbacks that 
depend on pathname, despite the new front-end-provided + # value. + if not test_async(): + return + app = Dash() + + app.layout = html.Div( + [ + dcc.Location(id="loc", refresh=refresh), + html.Button("Update path", id="btn"), + dash_table.DataTable(id="t", columns=[{"name": "a", "id": "a"}]), + html.Div(id="out"), + ] + ) + + @app.callback(Output("t", "data"), [Input("loc", "pathname")]) + async def set_data(path): + return [{"a": (path or repr(path)) + ":a"}] + + @app.callback( + Output("out", "children"), [Input("loc", "pathname"), Input("t", "data")] + ) + async def set_out(path, data): + return json.dumps(data) + " - " + (path or repr(path)) + + @app.callback(Output("loc", "pathname"), [Input("btn", "n_clicks")]) + async def set_path(n): + if not n: + raise PreventUpdate + + return "/{0}".format(n) + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#out", '[{"a": "/:a"}] - /') + dash_duo.find_element("#btn").click() + # the refresh=True case here is testing that we really do get the right + # pathname, not the prevented default value from the layout. + dash_duo.wait_for_text_to_equal("#out", '[{"a": "/1:a"}] - /1') + if not refresh: + dash_duo.find_element("#btn").click() + dash_duo.wait_for_text_to_equal("#out", '[{"a": "/2:a"}] - /2') + + +def test_async_cbsc008_wildcard_prop_callbacks(dash_duo): + if not test_async(): + return + lock = Lock() + + app = Dash(__name__) + app.layout = html.Div( + [ + dcc.Input(id="input", value="initial value"), + html.Div( + html.Div( + [ + 1.5, + None, + "string", + html.Div( + id="output-1", + **{"data-cb": "initial value", "aria-cb": "initial value"}, + ), + ] + ) + ), + ] + ) + + input_call_count = Value("i", 0) + percy_enabled = Value("b", False) + + def snapshot(name): + percy_enabled.value = os.getenv("PERCY_ENABLE", "") != "" + dash_duo.percy_snapshot(name=name) + percy_enabled.value = False + + @app.callback(Output("output-1", "data-cb"), [Input("input", "value")]) + async def update_data(value): + with lock: + if not percy_enabled.value: + input_call_count.value += 1 + return value + + @app.callback(Output("output-1", "children"), [Input("output-1", "data-cb")]) + async def update_text(data): + return data + + dash_duo.start_server(app) + dash_duo.wait_for_text_to_equal("#output-1", "initial value") + assert ( + dash_duo.find_element("#output-1").get_attribute("data-cb") == "initial value" + ) + + input1 = dash_duo.find_element("#input") + dash_duo.clear_input(input1) + + for key in "hello world": + with lock: + input1.send_keys(key) + + dash_duo.wait_for_text_to_equal("#output-1", "hello world") + assert dash_duo.find_element("#output-1").get_attribute("data-cb") == "hello world" + + # an initial call, one for clearing the input + # and one for each hello world character + assert input_call_count.value == 2 + len("hello world") + + assert dash_duo.get_logs() == [] + + +def test_async_cbsc009_callback_using_unloaded_async_component_and_graph(dash_duo): + if not test_async(): + return + app = Dash(__name__) + app.layout = FragmentComponent( + [ + CollapseComponent([AsyncComponent(id="async", value="A")], id="collapse"), + html.Button("n", id="n"), + DelayedEventComponent(id="d"), + html.Div("Output init", id="output"), + ] + ) + + @app.callback( + Output("output", "children"), + Output("collapse", "display"), + Input("n", "n_clicks"), + Input("d", "n_clicks"), + Input("async", "value"), + ) + async def content(n, d, v): + return json.dumps([n, d, v]), (n or 0) > 1 + + dash_duo.start_server(app) + + 
dash_duo.wait_for_text_to_equal("#output", '[null, null, "A"]') + dash_duo.wait_for_element("#d").click() + + dash_duo.wait_for_text_to_equal("#output", '[null, 1, "A"]') + + dash_duo.wait_for_element("#n").click() + dash_duo.wait_for_text_to_equal("#output", '[1, 1, "A"]') + + dash_duo.wait_for_element("#d").click() + dash_duo.wait_for_text_to_equal("#output", '[1, 2, "A"]') + + dash_duo.wait_for_no_elements("#async") + + dash_duo.wait_for_element("#n").click() + dash_duo.wait_for_text_to_equal("#output", '[2, 2, "A"]') + dash_duo.wait_for_text_to_equal("#async", "A") + + assert dash_duo.get_logs() == [] + + +def test_async_cbsc010_event_properties(dash_duo): + if not test_async(): + return + app = Dash(__name__) + app.layout = html.Div([html.Button("Click Me", id="button"), html.Div(id="output")]) + + call_count = Value("i", 0) + + @app.callback(Output("output", "children"), [Input("button", "n_clicks")]) + async def update_output(n_clicks): + if not n_clicks: + raise PreventUpdate + call_count.value += 1 + return "Click" + + dash_duo.start_server(app) + dash_duo.wait_for_text_to_equal("#output", "") + assert call_count.value == 0 + + dash_duo.find_element("#button").click() + dash_duo.wait_for_text_to_equal("#output", "Click") + assert call_count.value == 1 + + +def test_async_cbsc011_one_call_for_multiple_outputs_initial(dash_duo): + if not test_async(): + return + app = Dash(__name__) + call_count = Value("i", 0) + + app.layout = html.Div( + [ + html.Div( + [ + dcc.Input(value="Input {}".format(i), id="input-{}".format(i)) + for i in range(10) + ] + ), + html.Div(id="container"), + dcc.RadioItems(), + ] + ) + + @app.callback( + Output("container", "children"), + [Input("input-{}".format(i), "value") for i in range(10)], + ) + async def dynamic_output(*args): + call_count.value += 1 + return json.dumps(args) + + dash_duo.start_server(app) + dash_duo.wait_for_text_to_equal("#input-9", "Input 9") + dash_duo.wait_for_contains_text("#container", "Input 9") + + assert call_count.value == 1 + inputs = [f'"Input {i}"' for i in range(10)] + expected = f'[{", ".join(inputs)}]' + dash_duo.wait_for_text_to_equal("#container", expected) + assert dash_duo.get_logs() == [] + + +def test_async_cbsc012_one_call_for_multiple_outputs_update(dash_duo): + if not test_async(): + return + app = Dash(__name__, suppress_callback_exceptions=True) + call_count = Value("i", 0) + + app.layout = html.Div( + [ + html.Button(id="display-content", children="Display Content"), + html.Div(id="container"), + dcc.RadioItems(), + ] + ) + + @app.callback(Output("container", "children"), Input("display-content", "n_clicks")) + async def display_output(n_clicks): + if not n_clicks: + return "" + return html.Div( + [ + html.Div( + [ + dcc.Input(value="Input {}".format(i), id="input-{}".format(i)) + for i in range(10) + ] + ), + html.Div(id="dynamic-output"), + ] + ) + + @app.callback( + Output("dynamic-output", "children"), + [Input("input-{}".format(i), "value") for i in range(10)], + ) + async def dynamic_output(*args): + call_count.value += 1 + return json.dumps(args) + + dash_duo.start_server(app) + + dash_duo.find_element("#display-content").click() + + dash_duo.wait_for_text_to_equal("#input-9", "Input 9") + + ### order altered from the original, as these are non-blocking callbacks now + inputs = [f'"Input {i}"' for i in range(10)] + expected = f'[{", ".join(inputs)}]' + dash_duo.wait_for_text_to_equal("#dynamic-output", expected) + assert call_count.value == 1 + assert dash_duo.get_logs() == [] + + +def 
test_async_cbsc013_multi_output_out_of_order(dash_duo): + if not test_async(): + return + app = Dash(__name__) + app.layout = html.Div( + [ + html.Button("Click", id="input", n_clicks=0), + html.Div(id="output1"), + html.Div(id="output2"), + ] + ) + + call_count = Value("i", 0) + lock = Lock() + + @app.callback( + Output("output1", "children"), + Output("output2", "children"), + Input("input", "n_clicks"), + ) + async def update_output(n_clicks): + call_count.value += 1 + if n_clicks == 1: + with lock: + pass + return n_clicks, n_clicks + 1 + + dash_duo.start_server(app) + + button = dash_duo.find_element("#input") + with lock: + button.click() + button.click() + + dash_duo.wait_for_text_to_equal("#output1", "2") + dash_duo.wait_for_text_to_equal("#output2", "3") + assert call_count.value == 3 + assert dash_duo.driver.execute_script("return !window.store.getState().isLoading;") + assert dash_duo.get_logs() == [] + + +def test_async_cbsc014_multiple_properties_update_at_same_time_on_same_component( + dash_duo, +): + if not test_async(): + return + call_count = Value("i", 0) + timestamp_1 = Value("d", -5) + timestamp_2 = Value("d", -5) + + app = Dash(__name__) + app.layout = html.Div( + [ + html.Div(id="container"), + html.Button("Click 1", id="button-1", n_clicks=0, n_clicks_timestamp=-1), + html.Button("Click 2", id="button-2", n_clicks=0, n_clicks_timestamp=-1), + ] + ) + + @app.callback( + Output("container", "children"), + Input("button-1", "n_clicks"), + Input("button-1", "n_clicks_timestamp"), + Input("button-2", "n_clicks"), + Input("button-2", "n_clicks_timestamp"), + ) + async def update_output(n1, t1, n2, t2): + call_count.value += 1 + timestamp_1.value = t1 + timestamp_2.value = t2 + return "{}, {}".format(n1, n2) + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#container", "0, 0") + assert timestamp_1.value == -1 + assert timestamp_2.value == -1 + assert call_count.value == 1 + + dash_duo.find_element("#button-1").click() + dash_duo.wait_for_text_to_equal("#container", "1, 0") + assert timestamp_1.value > ((time.time() - (24 * 60 * 60)) * 1000) + assert timestamp_2.value == -1 + assert call_count.value == 2 + prev_timestamp_1 = timestamp_1.value + + dash_duo.find_element("#button-2").click() + dash_duo.wait_for_text_to_equal("#container", "1, 1") + assert timestamp_1.value == prev_timestamp_1 + assert timestamp_2.value > ((time.time() - 24 * 60 * 60) * 1000) + assert call_count.value == 3 + prev_timestamp_2 = timestamp_2.value + + dash_duo.find_element("#button-2").click() + dash_duo.wait_for_text_to_equal("#container", "1, 2") + assert timestamp_1.value == prev_timestamp_1 + assert timestamp_2.value > prev_timestamp_2 + assert timestamp_2.value > timestamp_1.value + assert call_count.value == 4 + + +def test_async_cbsc015_input_output_callback(dash_duo): + if not test_async(): + return + return + # disabled because this is trying to look at a synchronous lock in an async environment + # + # import asyncio + # lock = asyncio.Lock() + # + # call_count = Value("i", 0) + # + # app = Dash(__name__) + # app.layout = html.Div( + # [html.Div("0", id="input-text"), dcc.Input(id="input", type="number", value=0)] + # ) + # + # @app.callback( + # Output("input", "value"), + # Input("input", "value"), + # ) + # async def circular_output(v): + # ctx = callback_context + # if not ctx.triggered: + # value = v + # else: + # value = v + 1 + # return value + # + # @app.callback( + # Output("input-text", "children"), + # Input("input", "value"), + # ) + # async def 
follower_output(v): + # async with lock: + # call_count.value = call_count.value + 1 + # return str(v) + # + # dash_duo.start_server(app) + # + # input_ = dash_duo.find_element("#input") + # for key in "2": + # async with lock: + # input_.send_keys(key) + # + # dash_duo.wait_for_text_to_equal("#input-text", "3") + # + # assert call_count.value == 2, "initial + changed once" + # + # assert not dash_duo.redux_state_is_loading + # + # assert dash_duo.get_logs() == [] + + +def test_async_cbsc016_extra_components_callback(dash_duo): + if not test_async(): + return + lock = Lock() + + app = Dash(__name__) + app._extra_components.append(dcc.Store(id="extra-store", data=123)) + + app.layout = html.Div( + [ + dcc.Input(id="input", value="initial value"), + html.Div(html.Div([1.5, None, "string", html.Div(id="output-1")])), + ] + ) + store_data = Value("i", 0) + + @app.callback( + Output("output-1", "children"), + [Input("input", "value"), Input("extra-store", "data")], + ) + async def update_output(value, data): + with lock: + store_data.value = data + return value + + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#output-1", "initial value") + + input_ = dash_duo.find_element("#input") + dash_duo.clear_input(input_) + input_.send_keys("A") + + dash_duo.wait_for_text_to_equal("#output-1", "A") + + assert store_data.value == 123 + assert dash_duo.get_logs() == [] + + +def test_async_cbsc017_callback_directly_callable(): + if not test_async(): + return + ## unneeded + # app = Dash(__name__) + # app.layout = html.Div( + # [ + # dcc.Input(id="input", value="initial value"), + # html.Div(html.Div([1.5, None, "string", html.Div(id="output-1")])), + # ] + # ) + # + # @app.callback( + # Output("output-1", "children"), + # [Input("input", "value")], + # ) + # async def update_output(value): + # return f"returning {value}" + # + # assert update_output("my-value") == "returning my-value" + + +def test_async_cbsc018_callback_ndarray_output(dash_duo): + if not test_async(): + return + app = Dash(__name__) + app.layout = html.Div([dcc.Store(id="output"), html.Button("click", id="clicker")]) + + @app.callback( + Output("output", "data"), + Input("clicker", "n_clicks"), + ) + async def on_click(_): + return np.array([[1, 2, 3], [4, 5, 6]], np.int32) + + dash_duo.start_server(app) + + assert dash_duo.get_logs() == [] + + +def test_async_cbsc019_callback_running(dash_duo): + if not test_async(): + return + lock = Lock() + app = Dash(__name__) + + app.layout = html.Div( + [ + html.Div("off", id="running"), + html.Button("start", id="start"), + html.Div(id="output"), + ] + ) + + @app.callback( + Output("output", "children"), + Input("start", "n_clicks"), + running=[[Output("running", "children"), html.B("on", id="content"), "off"]], + prevent_initial_call=True, + ) + async def on_click(_): + with lock: + pass + return "done" + + dash_duo.start_server(app) + with lock: + dash_duo.find_element("#start").click() + dash_duo.wait_for_text_to_equal("#content", "on") + + dash_duo.wait_for_text_to_equal("#output", "done") + dash_duo.wait_for_text_to_equal("#running", "off") + + +def test_async_cbsc020_callback_running_non_existing_component(dash_duo): + if not test_async(): + return + lock = Lock() + app = Dash(__name__, suppress_callback_exceptions=True) + + app.layout = html.Div( + [ + html.Button("start", id="start"), + html.Div(id="output"), + ] + ) + + @app.callback( + Output("output", "children"), + Input("start", "n_clicks"), + running=[ + [ + Output("non_existent_component", "children"), + 
html.B("on", id="content"), + "off", + ] + ], + prevent_initial_call=True, + ) + async def on_click(_): + with lock: + pass + return "done" + + dash_duo.start_server(app) + with lock: + dash_duo.find_element("#start").click() + + dash_duo.wait_for_text_to_equal("#output", "done") + + +def test_async_cbsc021_callback_running_non_existing_component(dash_duo): + if not test_async(): + return + lock = Lock() + app = Dash(__name__) + + app.layout = html.Div( + [ + html.Button("start", id="start"), + html.Div(id="output"), + ] + ) + + @app.callback( + Output("output", "children"), + Input("start", "n_clicks"), + running=[ + [ + Output("non_existent_component", "children"), + html.B("on", id="content"), + "off", + ] + ], + prevent_initial_call=True, + ) + async def on_click(_): + with lock: + pass + return "done" + + dash_duo.start_server( + app, + debug=True, + use_reloader=False, + use_debugger=True, + dev_tools_hot_reload=False, + ) + with lock: + dash_duo.find_element("#start").click() + + dash_duo.wait_for_text_to_equal("#output", "done") + error_title = "ID running component not found in layout" + error_message = [ + "Component defined in running keyword not found in layout.", + 'Component id: "non_existent_component"', + "This ID was used in the callback(s) for Output(s):", + "output.children", + "You can suppress this exception by setting", + "`suppress_callback_exceptions=True`.", + ] + # The error should show twice, once for trying to set running on and once for + # turning it off. + dash_duo.wait_for_text_to_equal(dash_duo.devtools_error_count_locator, "2") + for error in dash_duo.find_elements(".dash-fe-error__title"): + assert error.text == error_title + for error_text in dash_duo.find_elements(".dash-backend-error"): + assert all(line in error_text for line in error_message) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000000..4c1cc2e5e1 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,8 @@ +def test_async(): + try: + + import asgiref # pylint: disable=unused-import, # noqa: F401 + + return True + except ImportError: + return False From fdfd0581d76aa523218334abae8ec86c7cb3ed00 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Fri, 6 Dec 2024 23:07:55 -0500 Subject: [PATCH 23/38] bypassing `test_rdrh003_refresh_jwt` as this fails with 3 failed requests vs 2 --- tests/integration/renderer/test_request_hooks.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/integration/renderer/test_request_hooks.py b/tests/integration/renderer/test_request_hooks.py index 7f707cf823..466beb4918 100644 --- a/tests/integration/renderer/test_request_hooks.py +++ b/tests/integration/renderer/test_request_hooks.py @@ -2,6 +2,7 @@ import functools import flask import pytest +from tests.utils import test_async from flaky import flaky @@ -203,6 +204,9 @@ def update_output(value): @flaky(max_runs=3) @pytest.mark.parametrize("expiry_code", [401, 400]) def test_rdrh003_refresh_jwt(expiry_code, dash_duo): + if test_async(): + return # if async, bypass this test as this ends up wrapping async funcs and results in 3 failed requests + app = Dash(__name__) app.index_string = """ From fb691c3b9ec265b8ae0e5dc43d1666da55dfc50c Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Fri, 6 Dec 2024 23:27:18 -0500 Subject: [PATCH 24/38] removing `__init__` from `async` directory --- tests/integration/async/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/integration/async/__init__.py diff --git a/tests/integration/async/__init__.py 
b/tests/integration/async/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 From 2473546a3d83ba2e01458ff2735d50b6595d8801 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 14:53:33 -0500 Subject: [PATCH 25/38] adjusting `jwt` test to adjust value in the MultiProcessing and removing the clearing input values to make sure that we were only triggering callbacks when we desired. --- .../renderer/test_request_hooks.py | 43 +++++++++++-------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/tests/integration/renderer/test_request_hooks.py b/tests/integration/renderer/test_request_hooks.py index 466beb4918..c735fba11a 100644 --- a/tests/integration/renderer/test_request_hooks.py +++ b/tests/integration/renderer/test_request_hooks.py @@ -1,8 +1,10 @@ +import asyncio import json import functools import flask import pytest from tests.utils import test_async +from multiprocessing import Value from flaky import flaky @@ -201,12 +203,9 @@ def update_output(value): assert dash_duo.get_logs() == [] -@flaky(max_runs=3) +# @flaky(max_runs=3) @pytest.mark.parametrize("expiry_code", [401, 400]) def test_rdrh003_refresh_jwt(expiry_code, dash_duo): - if test_async(): - return # if async, bypass this test as this ends up wrapping async funcs and results in 3 failed requests - app = Dash(__name__) app.index_string = """ @@ -248,28 +247,35 @@ def test_rdrh003_refresh_jwt(expiry_code, dash_duo): ] ) - @app.callback(Output("output-1", "children"), [Input("input", "value")]) + @app.callback(Output("output-1", "children"), [Input("input", "value") + ],prevent_initial_call=True) def update_output(value): + jwt_token.value = len(value) + 1 return value - required_jwt_len = 0 + jwt_token = Value("i", 0) # test with an auth layer that requires a JWT with a certain length def protect_route(func): @functools.wraps(func) def wrap(*args, **kwargs): try: + if flask.request.method == "OPTIONS": return func(*args, **kwargs) token = flask.request.headers.environ.get("HTTP_AUTHORIZATION") - if required_jwt_len and ( - not token or len(token) != required_jwt_len + len("Bearer ") + if jwt_token.value and ( + not token or len(token) != jwt_token.value + len("Bearer ") ): # Read the data to prevent bug with base http server. 
flask.request.get_json(silent=True) flask.abort(expiry_code, description="JWT Expired " + str(token)) except HTTPException as e: return e + if asyncio.iscoroutinefunction(func): + if test_async(): + from asgiref.sync import async_to_sync # pylint: disable=unused-import, # noqa: F401 + return async_to_sync(func)(*args, **kwargs) return func(*args, **kwargs) return wrap @@ -287,22 +293,21 @@ def wrap(*args, **kwargs): _in = dash_duo.find_element("#input") dash_duo.clear_input(_in) - required_jwt_len = 1 - - _in.send_keys("fired request") + dash_duo.wait_for_text_to_equal("#output-1", "") - dash_duo.wait_for_text_to_equal("#output-1", "fired request") + _in.send_keys(".") + dash_duo.wait_for_text_to_equal("#output-1", ".") dash_duo.wait_for_text_to_equal("#output-token", ".") - required_jwt_len = 2 - - dash_duo.clear_input(_in) - _in.send_keys("fired request again") - - dash_duo.wait_for_text_to_equal("#output-1", "fired request again") + _in.send_keys(".") + dash_duo.wait_for_text_to_equal("#output-1", "..") dash_duo.wait_for_text_to_equal("#output-token", "..") - assert len(dash_duo.get_logs()) == 2 + _in.send_keys(".") + dash_duo.wait_for_text_to_equal("#output-1", "...") + dash_duo.wait_for_text_to_equal("#output-token", "...") + + assert len(dash_duo.get_logs()) == 3 def test_rdrh004_layout_hooks(dash_duo): From b2e88843031fd66af17146350b314a5506dec599 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 14:54:34 -0500 Subject: [PATCH 26/38] removing flaky for lint --- tests/integration/renderer/test_request_hooks.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration/renderer/test_request_hooks.py b/tests/integration/renderer/test_request_hooks.py index c735fba11a..914f8087ae 100644 --- a/tests/integration/renderer/test_request_hooks.py +++ b/tests/integration/renderer/test_request_hooks.py @@ -6,8 +6,6 @@ from tests.utils import test_async from multiprocessing import Value -from flaky import flaky - from dash import Dash, Output, Input, html, dcc from dash.types import RendererHooks from werkzeug.exceptions import HTTPException From df29ee664849e88aa98334024262a87d38fe54f7 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 14:55:41 -0500 Subject: [PATCH 27/38] adjustments for formatting --- tests/integration/renderer/test_request_hooks.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/integration/renderer/test_request_hooks.py b/tests/integration/renderer/test_request_hooks.py index 914f8087ae..b00dce95f9 100644 --- a/tests/integration/renderer/test_request_hooks.py +++ b/tests/integration/renderer/test_request_hooks.py @@ -245,8 +245,11 @@ def test_rdrh003_refresh_jwt(expiry_code, dash_duo): ] ) - @app.callback(Output("output-1", "children"), [Input("input", "value") - ],prevent_initial_call=True) + @app.callback( + Output("output-1", "children"), + [Input("input", "value")], + prevent_initial_call=True, + ) def update_output(value): jwt_token.value = len(value) + 1 return value @@ -272,7 +275,10 @@ def wrap(*args, **kwargs): return e if asyncio.iscoroutinefunction(func): if test_async(): - from asgiref.sync import async_to_sync # pylint: disable=unused-import, # noqa: F401 + from asgiref.sync import ( + async_to_sync, + ) # pylint: disable=unused-import, # noqa: F401 + return async_to_sync(func)(*args, **kwargs) return func(*args, **kwargs) From 5342573ab7121e80dac0b3adeae758a209f47a32 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 15:47:30 -0500 Subject: [PATCH 28/38] 
simplifying `jwt` test by using `before_request` removing needs to check for async and wrapping a view. --- .../renderer/test_request_hooks.py | 48 ++++--------------- 1 file changed, 10 insertions(+), 38 deletions(-) diff --git a/tests/integration/renderer/test_request_hooks.py b/tests/integration/renderer/test_request_hooks.py index b00dce95f9..fe7d780c72 100644 --- a/tests/integration/renderer/test_request_hooks.py +++ b/tests/integration/renderer/test_request_hooks.py @@ -1,14 +1,10 @@ -import asyncio import json -import functools import flask import pytest -from tests.utils import test_async from multiprocessing import Value from dash import Dash, Output, Input, html, dcc from dash.types import RendererHooks -from werkzeug.exceptions import HTTPException def test_rdrh001_request_hooks(dash_duo): @@ -257,40 +253,16 @@ def update_output(value): jwt_token = Value("i", 0) # test with an auth layer that requires a JWT with a certain length - def protect_route(func): - @functools.wraps(func) - def wrap(*args, **kwargs): - try: - - if flask.request.method == "OPTIONS": - return func(*args, **kwargs) - token = flask.request.headers.environ.get("HTTP_AUTHORIZATION") - if jwt_token.value and ( - not token or len(token) != jwt_token.value + len("Bearer ") - ): - # Read the data to prevent bug with base http server. - flask.request.get_json(silent=True) - flask.abort(expiry_code, description="JWT Expired " + str(token)) - except HTTPException as e: - return e - if asyncio.iscoroutinefunction(func): - if test_async(): - from asgiref.sync import ( - async_to_sync, - ) # pylint: disable=unused-import, # noqa: F401 - - return async_to_sync(func)(*args, **kwargs) - return func(*args, **kwargs) - - return wrap - - # wrap all API calls with auth. - for name, method in ( - (x, app.server.view_functions[x]) - for x in app.routes - if x in app.server.view_functions - ): - app.server.view_functions[name] = protect_route(method) + @app.server.before_request + def add_auth(): + if flask.request.method != "OPTIONS": + token = flask.request.headers.environ.get("HTTP_AUTHORIZATION") + if jwt_token.value and ( + not token or len(token) != jwt_token.value + len("Bearer ") + ): + # Read the data to prevent bug with base http server. 
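+                # Abort with the configured expiry code so the renderer
+                # refreshes the JWT and retries the request.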
+ flask.request.get_json(silent=True) + flask.abort(expiry_code, description="JWT Expired " + str(token)) dash_duo.start_server(app) From c79debf17ce21537807bc65b354d45da49ca00a8 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 16:34:25 -0500 Subject: [PATCH 29/38] attempting to allow for failed tests to be rerun to see if the cache being cleared allows the failed integrations to pass --- package.json | 2 +- rerun_failed_tests.py | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 rerun_failed_tests.py diff --git a/package.json b/package.json index 94afcd7b5c..22ba0b4400 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,7 @@ "lint": "run-s private::lint.* --continue-on-error", "setup-tests.py": "run-s private::test.py.deploy-*", "setup-tests.R": "run-s private::test.R.deploy-*", - "citest.integration": "run-s setup-tests.py private::test.integration-*", + "citest.integration": "run-s setup-tests.py private::test.integration-* && python rerun_failed_tests.py", "citest.unit": "run-s private::test.unit-**", "test": "pytest && cd dash/dash-renderer && npm run test", "first-build": "cd dash/dash-renderer && npm i && cd ../../ && cd components/dash-html-components && npm i && npm run extract && cd ../../ && npm run build" diff --git a/rerun_failed_tests.py b/rerun_failed_tests.py new file mode 100644 index 0000000000..9b2859eb57 --- /dev/null +++ b/rerun_failed_tests.py @@ -0,0 +1,25 @@ +import xml.etree.ElementTree as ET +import subprocess + +def parse_test_results(file_path): + tree = ET.parse(file_path) + root = tree.getroot() + failed_tests = [] + for testcase in root.iter('testcase'): + if testcase.find('failure') is not None: + failed_tests.append(testcase.get('name')) + return failed_tests + +def rerun_failed_tests(failed_tests): + if failed_tests: + print("Initial failed tests:", failed_tests) + failed_test_names = ' '.join(failed_tests) + result = subprocess.run(f'pytest --headless {failed_test_names}', shell=True, capture_output=True, text=True) + print(result.stdout) + print(result.stderr) + else: + print('All tests passed.') + +if __name__ == "__main__": + failed_tests = parse_test_results('test-reports/junit_intg.xml') + rerun_failed_tests(failed_tests) From 37d649c06c022328f37ac8b9d1492e9b87a164ec Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 16:50:58 -0500 Subject: [PATCH 30/38] moving retry to `integration-dash` --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 22ba0b4400..2450dab155 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "private::test.unit-dash": "pytest tests/unit", "private::test.unit-renderer": "cd dash/dash-renderer && npm run test", "private::test.unit-generation": "cd @plotly/dash-generator-test-component-typescript && npm ci && npm test", - "private::test.integration-dash": "TESTFILES=$(circleci tests glob \"tests/integration/**/test_*.py\" | circleci tests split --split-by=timings) && pytest --headless --nopercyfinalize --junitxml=test-reports/junit_intg.xml ${TESTFILES}", + "private::test.integration-dash": "TESTFILES=$(circleci tests glob \"tests/integration/**/test_*.py\" | circleci tests split --split-by=timings) && pytest --headless --nopercyfinalize --junitxml=test-reports/junit_intg.xml ${TESTFILES} && python rerun_failed_tests.py", "private::test.integration-dash-import": "cd tests/integration/dash && python dash_import_test.py", "cibuild": "run-s private::cibuild.*", 
"build": "run-s private::build.*", @@ -41,7 +41,7 @@ "lint": "run-s private::lint.* --continue-on-error", "setup-tests.py": "run-s private::test.py.deploy-*", "setup-tests.R": "run-s private::test.R.deploy-*", - "citest.integration": "run-s setup-tests.py private::test.integration-* && python rerun_failed_tests.py", + "citest.integration": "run-s setup-tests.py private::test.integration-*", "citest.unit": "run-s private::test.unit-**", "test": "pytest && cd dash/dash-renderer && npm run test", "first-build": "cd dash/dash-renderer && npm i && cd ../../ && cd components/dash-html-components && npm i && npm run extract && cd ../../ && npm run build" From eaec04f49e65991e16c0b95ef82c6477241c7948 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 17:12:04 -0500 Subject: [PATCH 31/38] adjusting browser for percy snapshot to append `async` to the snapshot name to note if the image is `async` vs default --- dash/testing/browser.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dash/testing/browser.py b/dash/testing/browser.py index c4fcb8fca8..508345a9a0 100644 --- a/dash/testing/browser.py +++ b/dash/testing/browser.py @@ -159,6 +159,11 @@ def percy_snapshot( """ if widths is None: widths = [1280] + try: + import asgiref # pylint: disable=unused-import, # noqa: F401 + name += '_async' + except: + pass logger.info("taking snapshot name => %s", name) try: From 68320f415dc0e11b6c0f3a5facd00ef6358d5c62 Mon Sep 17 00:00:00 2001 From: BryanSchroeder Date: Mon, 9 Dec 2024 17:29:17 -0500 Subject: [PATCH 32/38] fixing `browser` for lint --- dash/testing/browser.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dash/testing/browser.py b/dash/testing/browser.py index 508345a9a0..0c55f7031a 100644 --- a/dash/testing/browser.py +++ b/dash/testing/browser.py @@ -160,9 +160,10 @@ def percy_snapshot( if widths is None: widths = [1280] try: - import asgiref # pylint: disable=unused-import, # noqa: F401 - name += '_async' - except: + import asgiref # pylint: disable=unused-import, import-outside-toplevel # noqa: F401, C0415 + + name += "_async" + except ImportError: pass logger.info("taking snapshot name => %s", name) From fc6c6bc99bdf36efdee6bf0b0f8fcca78f8546c8 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 12:51:16 -0400 Subject: [PATCH 33/38] updating missing `long` -> `background` adjustments and fixing issue for missing `callback_ctx` --- .circleci/config.yml | 6 +++++- dash/_callback.py | 2 ++ dash/background_callback/managers/celery_manager.py | 6 +++--- dash/background_callback/managers/diskcache_manager.py | 6 +++--- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 44de003c60..e8c17a2df9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -248,7 +248,11 @@ jobs: steps: - checkout: path: ~/dash - - run: sudo apt-get update + - run: + name: Add chrome keys & update. 
+ command: | + wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - + sudo apt-get update - run: echo $PYVERSION > ver.txt - run: cat requirements/*.txt > requirements-all.txt - restore_cache: diff --git a/dash/_callback.py b/dash/_callback.py index dedbaefe28..3c21b26f5a 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -671,6 +671,7 @@ def add_context(*args, **kwargs): func, func_args, func_kwargs, + callback_ctx ) output_value, has_update, skip = _update_background_callback( @@ -741,6 +742,7 @@ async def async_add_context(*args, **kwargs): func, func_args, func_kwargs, + callback_ctx ) output_value, has_update, skip = _update_background_callback( error_handler, callback_ctx, response, kwargs, background, multi diff --git a/dash/background_callback/managers/celery_manager.py b/dash/background_callback/managers/celery_manager.py index bbef7203aa..5235b65a80 100644 --- a/dash/background_callback/managers/celery_manager.py +++ b/dash/background_callback/managers/celery_manager.py @@ -18,7 +18,7 @@ class CeleryManager(BaseBackgroundCallbackManager): def __init__(self, celery_app, cache_by=None, expire=None): """ - Long callback manager that runs callback logic on a celery task queue, + Background callback manager that runs callback logic on a celery task queue, and stores results using a celery result backend. :param celery_app: @@ -42,7 +42,7 @@ def __init__(self, celery_app, cache_by=None, expire=None): except ImportError as missing_imports: raise ImportError( """\ -CeleryLongCallbackManager requires extra dependencies which can be installed doing +CeleryManager requires extra dependencies which can be installed doing $ pip install "dash[celery]"\n""" ) from missing_imports @@ -233,7 +233,7 @@ async def async_run(): result_key, json.dumps( { - "long_callback_error": { + "background_callback_error": { "msg": str(err), "tb": traceback.format_exc(), } diff --git a/dash/background_callback/managers/diskcache_manager.py b/dash/background_callback/managers/diskcache_manager.py index 1e21acfb8c..ed89e5df41 100644 --- a/dash/background_callback/managers/diskcache_manager.py +++ b/dash/background_callback/managers/diskcache_manager.py @@ -17,7 +17,7 @@ class DiskcacheManager(BaseBackgroundCallbackManager): def __init__(self, cache=None, cache_by=None, expire=None): """ - Long callback manager that runs callback logic in a subprocess and stores + Background callback manager that runs callback logic in a subprocess and stores results on disk using diskcache :param cache: @@ -40,7 +40,7 @@ def __init__(self, cache=None, cache_by=None, expire=None): except ImportError as missing_imports: raise ImportError( """\ -DiskcacheLongCallbackManager requires extra dependencies which can be installed doing +DiskcacheManager requires extra dependencies which can be installed doing $ pip install "dash[diskcache]"\n""" ) from missing_imports @@ -279,7 +279,7 @@ async def async_run(): cache.set( result_key, { - "long_callback_error": { + "background_callback_error": { "msg": str(err), "tb": traceback.format_exc(), } From 503f7c04a119a9008d60b55aaa43fb28136356ab Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 13:13:15 -0400 Subject: [PATCH 34/38] fixing for lint --- dash/_callback.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/dash/_callback.py b/dash/_callback.py index 3c21b26f5a..3947c08da1 100644 --- a/dash/_callback.py +++ b/dash/_callback.py @@ -342,7 +342,9 @@ def 
_initialize_context(args, kwargs, inputs_state_indices, has_output, insert_o def _get_callback_manager(kwargs, background): """Set up the background callback and manage jobs.""" - callback_manager = background.get("manager", kwargs.get("background_callback_manager", None)) + callback_manager = background.get( + "manager", kwargs.get("background_callback_manager", None) + ) if background is not None: if not callback_manager: raise MissingLongCallbackManagerError( @@ -364,13 +366,7 @@ def _get_callback_manager(kwargs, background): def _setup_background_callback( - kwargs, - background, - background_key, - func, - func_args, - func_kwargs, - callback_ctx + kwargs, background, background_key, func, func_args, func_kwargs, callback_ctx ): """Set up the background callback and manage jobs.""" callback_manager = _get_callback_manager(kwargs, background) @@ -384,9 +380,7 @@ def _setup_background_callback( # Inputs provided as dict is kwargs. func_args if func_args else func_kwargs, background.get("cache_args_to_ignore", []), - None - if cache_ignore_triggered - else callback_ctx.get("triggered_inputs", []), + None if cache_ignore_triggered else callback_ctx.get("triggered_inputs", []), ) job_fn = callback_manager.func_registry.get(background_key) @@ -433,7 +427,9 @@ def _progress_background_callback(response, callback_manager, background): } -def _update_background_callback(error_handler, callback_ctx, response, kwargs, background, multi): +def _update_background_callback( + error_handler, callback_ctx, response, kwargs, background, multi +): """Set up the background callback and manage jobs.""" callback_manager = _get_callback_manager(kwargs, background) @@ -671,7 +667,7 @@ def add_context(*args, **kwargs): func, func_args, func_kwargs, - callback_ctx + callback_ctx, ) output_value, has_update, skip = _update_background_callback( @@ -742,7 +738,7 @@ async def async_add_context(*args, **kwargs): func, func_args, func_kwargs, - callback_ctx + callback_ctx, ) output_value, has_update, skip = _update_background_callback( error_handler, callback_ctx, response, kwargs, background, multi From c26671e3b17e6f21b975cdd470b2efd893135071 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 14:38:09 -0400 Subject: [PATCH 35/38] adding tests for async background callbacks --- tests/integration/async_tests/__init__.py | 0 tests/integration/async_tests/app1_async.py | 31 ++++ .../async_tests/app_arbitrary_async.py | 50 ++++++ tests/integration/async_tests/conftest.py | 15 ++ .../test_async_background_callbacks.py | 57 +++++++ .../test_async_callbacks.py | 34 ++-- tests/integration/async_tests/utils.py | 154 ++++++++++++++++++ 7 files changed, 324 insertions(+), 17 deletions(-) create mode 100644 tests/integration/async_tests/__init__.py create mode 100644 tests/integration/async_tests/app1_async.py create mode 100644 tests/integration/async_tests/app_arbitrary_async.py create mode 100644 tests/integration/async_tests/conftest.py create mode 100644 tests/integration/async_tests/test_async_background_callbacks.py rename tests/integration/{async => async_tests}/test_async_callbacks.py (97%) create mode 100644 tests/integration/async_tests/utils.py diff --git a/tests/integration/async_tests/__init__.py b/tests/integration/async_tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/async_tests/app1_async.py b/tests/integration/async_tests/app1_async.py new file mode 100644 index 0000000000..daf76e885b --- /dev/null +++ 
b/tests/integration/async_tests/app1_async.py @@ -0,0 +1,31 @@ +from dash import Dash, Input, Output, dcc, html +import time + +from tests.integration.background_callback.utils import get_background_callback_manager + +background_callback_manager = get_background_callback_manager() +handle = background_callback_manager.handle + +app = Dash(__name__) +app.layout = html.Div( + [ + dcc.Input(id="input", value="initial value"), + html.Div(html.Div([1.5, None, "string", html.Div(id="output-1")])), + ] +) + + +@app.callback( + Output("output-1", "children"), + [Input("input", "value")], + interval=500, + manager=background_callback_manager, + background=True, +) +async def update_output(value): + time.sleep(0.1) + return value + + +if __name__ == "__main__": + app.run(debug=True) diff --git a/tests/integration/async_tests/app_arbitrary_async.py b/tests/integration/async_tests/app_arbitrary_async.py new file mode 100644 index 0000000000..080859b22b --- /dev/null +++ b/tests/integration/async_tests/app_arbitrary_async.py @@ -0,0 +1,50 @@ +from dash import Dash, Input, Output, html, callback, set_props +import time + +from tests.integration.background_callback.utils import get_background_callback_manager + +background_callback_manager = get_background_callback_manager() +handle = background_callback_manager.handle + +app = Dash(__name__, background_callback_manager=background_callback_manager) +app.test_lock = lock = background_callback_manager.test_lock + +app.layout = html.Div( + [ + html.Button("start", id="start"), + html.Div(id="secondary"), + html.Div(id="no-output"), + html.Div("initial", id="output"), + html.Button("start-no-output", id="start-no-output"), + ] +) + + +@callback( + Output("output", "children"), + Input("start", "n_clicks"), + prevent_initial_call=True, + background=True, + interval=500, +) +async def on_click(_): + set_props("secondary", {"children": "first"}) + set_props("secondary", {"style": {"background": "red"}}) + time.sleep(2) + set_props("secondary", {"children": "second"}) + return "completed" + + +@callback( + Input("start-no-output", "n_clicks"), + prevent_initial_call=True, + background=True, +) +async def on_click(_): + set_props("no-output", {"children": "started"}) + time.sleep(2) + set_props("no-output", {"children": "completed"}) + + +if __name__ == "__main__": + app.run(debug=True) diff --git a/tests/integration/async_tests/conftest.py b/tests/integration/async_tests/conftest.py new file mode 100644 index 0000000000..b701eea91a --- /dev/null +++ b/tests/integration/async_tests/conftest.py @@ -0,0 +1,15 @@ +import os + +import pytest + + +if "REDIS_URL" in os.environ: + managers = ["celery", "diskcache"] +else: + print("Skipping celery tests because REDIS_URL is not defined") + managers = ["diskcache"] + + +@pytest.fixture(params=managers) +def manager(request): + return request.param diff --git a/tests/integration/async_tests/test_async_background_callbacks.py b/tests/integration/async_tests/test_async_background_callbacks.py new file mode 100644 index 0000000000..d5cdff7917 --- /dev/null +++ b/tests/integration/async_tests/test_async_background_callbacks.py @@ -0,0 +1,57 @@ +import sys + +import pytest +from flaky import flaky +from multiprocessing import Lock +from tests.integration.async_tests.utils import setup_background_callback_app +import time + +def test_001ab_arbitrary(dash_duo, manager): + with setup_background_callback_app(manager, "app_arbitrary_async") as app: + dash_duo.start_server(app) + + dash_duo.wait_for_text_to_equal("#output", 
"initial") + # pause for sync + time.sleep(.2) + dash_duo.find_element("#start").click() + + dash_duo.wait_for_text_to_equal("#secondary", "first") + dash_duo.wait_for_style_to_equal( + "#secondary", "background-color", "rgba(255, 0, 0, 1)" + ) + dash_duo.wait_for_text_to_equal("#output", "initial") + dash_duo.wait_for_text_to_equal("#secondary", "second") + dash_duo.wait_for_text_to_equal("#output", "completed") + + dash_duo.find_element("#start-no-output").click() + + dash_duo.wait_for_text_to_equal("#no-output", "started") + dash_duo.wait_for_text_to_equal("#no-output", "completed") + + +@pytest.mark.skipif( + sys.version_info < (3, 7), reason="Python 3.6 long callbacks tests hangs up" +) +@flaky(max_runs=3) +def test_002ab_basic(dash_duo, manager): + """ + Make sure that we settle to the correct final value when handling rapid inputs + """ + lock = Lock() + with setup_background_callback_app(manager, "app1_async") as app: + dash_duo.start_server(app) + dash_duo.wait_for_text_to_equal("#output-1", "initial value", 15) + input_ = dash_duo.find_element("#input") + # pause for sync + time.sleep(.2) + dash_duo.clear_input(input_) + + for key in "hello world": + with lock: + input_.send_keys(key) + + dash_duo.wait_for_text_to_equal("#output-1", "hello world", 8) + + assert not dash_duo.redux_state_is_loading + assert dash_duo.get_logs() == [] + diff --git a/tests/integration/async/test_async_callbacks.py b/tests/integration/async_tests/test_async_callbacks.py similarity index 97% rename from tests/integration/async/test_async_callbacks.py rename to tests/integration/async_tests/test_async_callbacks.py index fd47c7fa05..5489b54910 100644 --- a/tests/integration/async/test_async_callbacks.py +++ b/tests/integration/async_tests/test_async_callbacks.py @@ -28,7 +28,7 @@ from tests.utils import test_async -def test_async_cbsc001_simple_callback(dash_duo): +def test_async_cbsc001_simple_callback(dash_duo, *args): if not test_async(): return lock = Lock() @@ -68,7 +68,7 @@ async def update_output(value): assert dash_duo.get_logs() == [] -def test_async_cbsc002_callbacks_generating_children(dash_duo): +def test_async_cbsc002_callbacks_generating_children(dash_duo, *args): """Modify the DOM tree by adding new components in the callbacks.""" if not test_async(): return @@ -154,7 +154,7 @@ async def update_input(value): assert dash_duo.get_logs() == [], "console is clean" -def test_async_cbsc003_callback_with_unloaded_async_component(dash_duo): +def test_async_cbsc003_callback_with_unloaded_async_component(dash_duo, *args): if not test_async(): return app = Dash() @@ -189,7 +189,7 @@ async def update_out(n_clicks): assert dash_duo.get_logs() == [] -def test_async_cbsc004_callback_using_unloaded_async_component(dash_duo): +def test_async_cbsc004_callback_using_unloaded_async_component(dash_duo, *args): if not test_async(): return app = Dash() @@ -322,7 +322,7 @@ async def set_out(opts): strict=False, ) @pytest.mark.parametrize("refresh", [False, True]) -def test_async_cbsc007_parallel_updates(refresh, dash_duo): +def test_async_cbsc007_parallel_updates(refresh, dash_duo, *args): # This is a funny case, that seems to mostly happen with dcc.Location # but in principle could happen in other cases too: # A callback chain (in this case the initial hydration) is set to update a @@ -377,7 +377,7 @@ async def set_path(n): dash_duo.wait_for_text_to_equal("#out", '[{"a": "/2:a"}] - /2') -def test_async_cbsc008_wildcard_prop_callbacks(dash_duo): +def test_async_cbsc008_wildcard_prop_callbacks(dash_duo, *args): if 
not test_async(): return lock = Lock() @@ -444,7 +444,7 @@ async def update_text(data): assert dash_duo.get_logs() == [] -def test_async_cbsc009_callback_using_unloaded_async_component_and_graph(dash_duo): +def test_async_cbsc009_callback_using_unloaded_async_component_and_graph(dash_duo, *args): if not test_async(): return app = Dash(__name__) @@ -489,7 +489,7 @@ async def content(n, d, v): assert dash_duo.get_logs() == [] -def test_async_cbsc010_event_properties(dash_duo): +def test_async_cbsc010_event_properties(dash_duo, *args): if not test_async(): return app = Dash(__name__) @@ -513,7 +513,7 @@ async def update_output(n_clicks): assert call_count.value == 1 -def test_async_cbsc011_one_call_for_multiple_outputs_initial(dash_duo): +def test_async_cbsc011_one_call_for_multiple_outputs_initial(dash_duo, *args): if not test_async(): return app = Dash(__name__) @@ -551,7 +551,7 @@ async def dynamic_output(*args): assert dash_duo.get_logs() == [] -def test_async_cbsc012_one_call_for_multiple_outputs_update(dash_duo): +def test_async_cbsc012_one_call_for_multiple_outputs_update(dash_duo, *args): if not test_async(): return app = Dash(__name__, suppress_callback_exceptions=True) @@ -603,7 +603,7 @@ async def dynamic_output(*args): assert dash_duo.get_logs() == [] -def test_async_cbsc013_multi_output_out_of_order(dash_duo): +def test_async_cbsc013_multi_output_out_of_order(dash_duo, *args): if not test_async(): return app = Dash(__name__) @@ -704,7 +704,7 @@ async def update_output(n1, t1, n2, t2): assert call_count.value == 4 -def test_async_cbsc015_input_output_callback(dash_duo): +def test_async_cbsc015_input_output_callback(dash_duo, *args): if not test_async(): return return @@ -757,7 +757,7 @@ def test_async_cbsc015_input_output_callback(dash_duo): # assert dash_duo.get_logs() == [] -def test_async_cbsc016_extra_components_callback(dash_duo): +def test_async_cbsc016_extra_components_callback(dash_duo, *args): if not test_async(): return lock = Lock() @@ -818,7 +818,7 @@ def test_async_cbsc017_callback_directly_callable(): # assert update_output("my-value") == "returning my-value" -def test_async_cbsc018_callback_ndarray_output(dash_duo): +def test_async_cbsc018_callback_ndarray_output(dash_duo, *args): if not test_async(): return app = Dash(__name__) @@ -836,7 +836,7 @@ async def on_click(_): assert dash_duo.get_logs() == [] -def test_async_cbsc019_callback_running(dash_duo): +def test_async_cbsc019_callback_running(dash_duo, *args): if not test_async(): return lock = Lock() @@ -870,7 +870,7 @@ async def on_click(_): dash_duo.wait_for_text_to_equal("#running", "off") -def test_async_cbsc020_callback_running_non_existing_component(dash_duo): +def test_async_cbsc020_callback_running_non_existing_component(dash_duo, *args): if not test_async(): return lock = Lock() @@ -907,7 +907,7 @@ async def on_click(_): dash_duo.wait_for_text_to_equal("#output", "done") -def test_async_cbsc021_callback_running_non_existing_component(dash_duo): +def test_async_cbsc021_callback_running_non_existing_component(dash_duo, *args): if not test_async(): return lock = Lock() diff --git a/tests/integration/async_tests/utils.py b/tests/integration/async_tests/utils.py new file mode 100644 index 0000000000..4975545e4c --- /dev/null +++ b/tests/integration/async_tests/utils.py @@ -0,0 +1,154 @@ +import os +import shutil +import subprocess +import tempfile +import time +from contextlib import contextmanager + +import psutil +import redis + +from dash.background_callback import DiskcacheManager + +manager = None + + 
+class TestDiskCacheManager(DiskcacheManager):
+    def __init__(self, cache=None, cache_by=None, expire=None):
+        super().__init__(cache=cache, cache_by=cache_by, expire=expire)
+        self.running_jobs = []
+
+    def call_job_fn(
+        self,
+        key,
+        job_fn,
+        args,
+        context,
+    ):
+        pid = super().call_job_fn(key, job_fn, args, context)
+        self.running_jobs.append(pid)
+        return pid
+
+
+def get_background_callback_manager():
+    """
+    Get the background callback manager configured by environment variables
+    """
+    if os.environ.get("LONG_CALLBACK_MANAGER", None) == "celery":
+        from dash.background_callback import CeleryManager
+        from celery import Celery
+        import redis
+
+        celery_app = Celery(
+            __name__,
+            broker=os.environ.get("CELERY_BROKER"),
+            backend=os.environ.get("CELERY_BACKEND"),
+        )
+        background_callback_manager = CeleryManager(celery_app)
+        redis_conn = redis.Redis(host="localhost", port=6379, db=1)
+        background_callback_manager.test_lock = redis_conn.lock("test-lock")
+    elif os.environ.get("LONG_CALLBACK_MANAGER", None) == "diskcache":
+        import diskcache
+
+        cache = diskcache.Cache(os.environ.get("DISKCACHE_DIR"))
+        background_callback_manager = TestDiskCacheManager(cache)
+        background_callback_manager.test_lock = diskcache.Lock(cache, "test-lock")
+    else:
+        raise ValueError(
+            "Invalid long callback manager specified as LONG_CALLBACK_MANAGER "
+            "environment variable"
+        )
+
+    global manager
+    manager = background_callback_manager
+
+    return background_callback_manager
+
+
+def kill(proc_pid):
+    process = psutil.Process(proc_pid)
+    for proc in process.children(recursive=True):
+        proc.kill()
+    process.kill()
+
+
+@contextmanager
+def setup_background_callback_app(manager_name, app_name):
+    from dash.testing.application_runners import import_app
+
+    if manager_name == "celery":
+        os.environ["LONG_CALLBACK_MANAGER"] = "celery"
+        redis_url = os.environ["REDIS_URL"].rstrip("/")
+        os.environ["CELERY_BROKER"] = f"{redis_url}/0"
+        os.environ["CELERY_BACKEND"] = f"{redis_url}/1"
+
+        # Clear redis of cached values
+        redis_conn = redis.Redis(host="localhost", port=6379, db=1)
+        cache_keys = redis_conn.keys()
+        if cache_keys:
+            redis_conn.delete(*cache_keys)
+
+        worker = subprocess.Popen(
+            [
+                "celery",
+                "-A",
+                f"tests.integration.background_callback.{app_name}:handle",
+                "worker",
+                "-P",
+                "prefork",
+                "--concurrency",
+                "2",
+                "--loglevel=info",
+            ],
+            encoding="utf8",
+            preexec_fn=os.setpgrp,
+            stderr=subprocess.PIPE,
+        )
+        # Wait for the worker to be ready; if you cancel before it is ready, the job
+        # will still be queued.
+ lines = [] + for line in iter(worker.stderr.readline, ""): + if "ready" in line: + break + lines.append(line) + else: + error = "\n".join(lines) + raise RuntimeError(f"celery failed to start: {error}") + + try: + yield import_app(f"tests.integration.async_tests.{app_name}") + finally: + # Interval may run one more time after settling on final app state + # Sleep for 1 interval of time + time.sleep(0.5) + os.environ.pop("LONG_CALLBACK_MANAGER") + os.environ.pop("CELERY_BROKER") + os.environ.pop("CELERY_BACKEND") + kill(worker.pid) + from dash import page_registry + + page_registry.clear() + + elif manager_name == "diskcache": + os.environ["LONG_CALLBACK_MANAGER"] = "diskcache" + cache_directory = tempfile.mkdtemp(prefix="lc-diskcache-") + print(cache_directory) + os.environ["DISKCACHE_DIR"] = cache_directory + try: + app = import_app(f"tests.integration.async_tests.{app_name}") + yield app + finally: + # Interval may run one more time after settling on final app state + # Sleep for a couple of intervals + time.sleep(2.0) + + if hasattr(manager, "running_jobs"): + for job in manager.running_jobs: + manager.terminate_job(job) + + shutil.rmtree(cache_directory, ignore_errors=True) + os.environ.pop("LONG_CALLBACK_MANAGER") + os.environ.pop("DISKCACHE_DIR") + from dash import page_registry + + page_registry.clear() From 20dcd6741a79fa2a6cdc276c959c08f8cf077b42 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 15:03:16 -0400 Subject: [PATCH 36/38] updating to bypass test if not async --- .../async_tests/test_async_background_callbacks.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/integration/async_tests/test_async_background_callbacks.py b/tests/integration/async_tests/test_async_background_callbacks.py index d5cdff7917..ba9a2ea58c 100644 --- a/tests/integration/async_tests/test_async_background_callbacks.py +++ b/tests/integration/async_tests/test_async_background_callbacks.py @@ -5,8 +5,11 @@ from multiprocessing import Lock from tests.integration.async_tests.utils import setup_background_callback_app import time +from tests.utils import test_async def test_001ab_arbitrary(dash_duo, manager): + if not test_async(): + return with setup_background_callback_app(manager, "app_arbitrary_async") as app: dash_duo.start_server(app) @@ -37,6 +40,8 @@ def test_002ab_basic(dash_duo, manager): """ Make sure that we settle to the correct final value when handling rapid inputs """ + if not test_async(): + return lock = Lock() with setup_background_callback_app(manager, "app1_async") as app: dash_duo.start_server(app) From bb3d881cf60e3ea4f12a0ecd55cf5c4dd563ff46 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 15:25:01 -0400 Subject: [PATCH 37/38] fixing for lint --- .../async_tests/test_async_background_callbacks.py | 6 +++--- tests/integration/async_tests/test_async_callbacks.py | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/integration/async_tests/test_async_background_callbacks.py b/tests/integration/async_tests/test_async_background_callbacks.py index ba9a2ea58c..03526d7941 100644 --- a/tests/integration/async_tests/test_async_background_callbacks.py +++ b/tests/integration/async_tests/test_async_background_callbacks.py @@ -7,6 +7,7 @@ import time from tests.utils import test_async + def test_001ab_arbitrary(dash_duo, manager): if not test_async(): return @@ -15,7 +16,7 @@ def test_001ab_arbitrary(dash_duo, manager): 
dash_duo.wait_for_text_to_equal("#output", "initial") # pause for sync - time.sleep(.2) + time.sleep(0.2) dash_duo.find_element("#start").click() dash_duo.wait_for_text_to_equal("#secondary", "first") @@ -48,7 +49,7 @@ def test_002ab_basic(dash_duo, manager): dash_duo.wait_for_text_to_equal("#output-1", "initial value", 15) input_ = dash_duo.find_element("#input") # pause for sync - time.sleep(.2) + time.sleep(0.2) dash_duo.clear_input(input_) for key in "hello world": @@ -59,4 +60,3 @@ def test_002ab_basic(dash_duo, manager): assert not dash_duo.redux_state_is_loading assert dash_duo.get_logs() == [] - diff --git a/tests/integration/async_tests/test_async_callbacks.py b/tests/integration/async_tests/test_async_callbacks.py index 5489b54910..2314da16f0 100644 --- a/tests/integration/async_tests/test_async_callbacks.py +++ b/tests/integration/async_tests/test_async_callbacks.py @@ -444,7 +444,9 @@ async def update_text(data): assert dash_duo.get_logs() == [] -def test_async_cbsc009_callback_using_unloaded_async_component_and_graph(dash_duo, *args): +def test_async_cbsc009_callback_using_unloaded_async_component_and_graph( + dash_duo, *args +): if not test_async(): return app = Dash(__name__) From c6940c7c6867defff1d43bae1226db4500d62e04 Mon Sep 17 00:00:00 2001 From: BSd3v <82055130+BSd3v@users.noreply.github.com> Date: Tue, 25 Mar 2025 16:05:09 -0400 Subject: [PATCH 38/38] update for celery app process --- tests/integration/async_tests/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/async_tests/utils.py b/tests/integration/async_tests/utils.py index 4975545e4c..ab2038f79f 100644 --- a/tests/integration/async_tests/utils.py +++ b/tests/integration/async_tests/utils.py @@ -92,7 +92,7 @@ def setup_background_callback_app(manager_name, app_name): [ "celery", "-A", - f"tests.integration.background_callback.{app_name}:handle", + f"tests.integration.async_tests.{app_name}:handle", "worker", "-P", "prefork",