From a67deeb60270c4d458b3cebde276ca51e7657aa4 Mon Sep 17 00:00:00 2001
From: Peter Law
Date: Sun, 30 Jun 2024 17:16:47 +0100
Subject: [PATCH] Fix race condition around subprocess inference state tidyup

There was a race condition due to the combination of Python's object ids
being re-usable and Jedi persisting such ids beyond the real lifetime of
some objects. This could lead to the subprocess' view of the lifetime of
`InferenceState` contexts getting out of step with that in the parent
process, resulting in errors when removing them. It is also possible that
this could result in erroneous results being reported; however, this was
not directly observed.

The race was specifically:

- `InferenceState` A created, gets id 1
- `InferenceStateSubprocess` A' created, uses `InferenceState` A which it
  stores as a weakref and an id
- `InferenceStateSubprocess` A' is used, the sub-process learns about an
  `InferenceState` with id 1
- `InferenceState` A goes away, `InferenceStateSubprocess` A' is not yet
  garbage collected
- `InferenceState` B created, gets id 1
- `InferenceStateSubprocess` B' created, uses `InferenceState` B which it
  stores as a weakref and an id
- `InferenceStateSubprocess` B' is used, the sub-process re-uses its entry
  for an `InferenceState` with id 1

At this point the order in which the two `InferenceStateSubprocess`
instances go away is immaterial -- both will trigger removal of a state
with id 1. As long as B' doesn't use the sub-process again after the first
removal has happened, the second removal will fail.

This commit resolves the race condition by coupling the context in the
subprocess to the corresponding manager class instance in the parent
process, rather than to the consumer `InferenceState`.

See inline comments for further details.
---
 .../inference/compiled/subprocess/__init__.py | 29 ++++++++++++++++++-
 1 file changed, 28 insertions(+), 1 deletion(-)

diff --git a/jedi/inference/compiled/subprocess/__init__.py b/jedi/inference/compiled/subprocess/__init__.py
index 852910493..3a6039f74 100644
--- a/jedi/inference/compiled/subprocess/__init__.py
+++ b/jedi/inference/compiled/subprocess/__init__.py
@@ -106,7 +106,6 @@ def _cleanup_process(process, thread):
 class _InferenceStateProcess:
     def __init__(self, inference_state: 'InferenceState') -> None:
         self._inference_state_weakref = weakref.ref(inference_state)
-        self._inference_state_id = id(inference_state)
         self._handles: Dict[int, AccessHandle] = {}
 
     def get_or_create_access_handle(self, obj):
@@ -159,6 +158,27 @@ def __init__(
         self._used = False
         self._compiled_subprocess = compiled_subprocess
 
+        # Opaque id we'll pass to the subprocess to identify the context (an
+        # `InferenceState`) which should be used for the request. This allows us
+        # to make subsequent requests which operate on results from previous
+        # ones, while keeping a single subprocess which can work with several
+        # contexts in the parent process. Once it is no longer needed (i.e: when
+        # this class goes away), we also use this id to indicate that the
+        # subprocess can discard the context.
+        #
+        # Note: this id is deliberately coupled to this class (and not to
+        # `InferenceState`) as this class manages access handle mappings which
+        # must correspond to those in the subprocess. This approach also avoids
+        # race conditions from successive `InferenceState`s with the same object
+        # id (as observed while adding support for Python 3.13).
+        #
+        # This value does not need to be the `id()` of this instance, we merely
+        # need to ensure that it enables the (visible) lifetime of the context
+        # within the subprocess to match that of this class. We therefore also
+        # depend on the semantics of `CompiledSubprocess.delete_inference_state`
+        # for correctness.
+        self._inference_state_id = id(self)
+
     def __getattr__(self, name):
         func = _get_function(name)
 
@@ -330,6 +350,10 @@ def delete_inference_state(self, inference_state_id):
         Note: it is not guaranteed that the corresponding state will actually
         be deleted immediately.
         """
+        # Warning: if changing the semantics of context deletion see the comment
+        # in `InferenceStateSubprocess.__init__` regarding potential race
+        # conditions.
+
         # Currently we are not deleting the related state instantly. They only
         # get deleted once the subprocess is used again. It would probably a
         # better solution to move all of this into a thread. However, the memory
@@ -397,6 +421,9 @@ def _run(self, inference_state_id, function, args, kwargs):
         if inference_state_id is None:
             return function(*args, **kwargs)
         elif function is None:
+            # Warning: if changing the semantics of context deletion see the comment
+            # in `InferenceStateSubprocess.__init__` regarding potential race
+            # conditions.
             del self._inference_states[inference_state_id]
         else:
             inference_state = self._get_inference_state(function, inference_state_id)
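
For illustration only (not part of the patch): a minimal, self-contained sketch of the id-reuse scenario described in the commit message. The class names below are simplified stand-ins for jedi's `InferenceState` and `InferenceStateSubprocess`, and the registry dict stands in for the subprocess' mapping of contexts. Whether the two ids actually collide depends on CPython's allocator, so the final KeyError is possible rather than guaranteed.

    # Illustrative sketch only -- not part of the patch.
    import weakref


    class FakeInferenceState:
        """Stand-in for jedi's InferenceState."""


    class FakeStateProxy:
        """Stand-in for InferenceStateSubprocess: keeps only a weakref and an id."""

        def __init__(self, inference_state, subprocess_registry):
            self._weakref = weakref.ref(inference_state)
            self._state_id = id(inference_state)
            self._registry = subprocess_registry
            # The subprocess learns about a context keyed by the parent's id().
            self._registry[self._state_id] = object()

        def close(self):
            # Mirrors the subprocess discarding its context when the proxy goes away.
            del self._registry[self._state_id]


    registry = {}

    state_a = FakeInferenceState()
    proxy_a = FakeStateProxy(state_a, registry)
    del state_a                     # InferenceState A goes away; proxy_a lingers

    state_b = FakeInferenceState()  # may be allocated at A's old address...
    proxy_b = FakeStateProxy(state_b, registry)

    print(id(state_b) == proxy_a._state_id)  # ...in which case the ids collide

    proxy_a.close()                 # removes the (possibly shared) registry entry
    proxy_b.close()                 # KeyError here if the ids collided

The patch sidesteps this by keying the subprocess context on the parent-side proxy object itself, which lives at least as long as the registry entry it owns, rather than on the possibly recycled id of the `InferenceState`.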