
Commit 410ce96

Revert "Put Python Dispatcher cache in dict, clear it on new registrations. (#88329)"
This reverts commit 86c7cd2. Reverted #88329 on behalf of https://github.com/clee2000 because test_decomp takes an extra 2 hours in some jobs; on Windows it takes so long that it times out.
1 parent 9946041 commit 410ce96

2 files changed (+13, -21 lines)

torch/_ops.py

Lines changed: 11 additions & 10 deletions
@@ -243,8 +243,6 @@ def __init__(self, overloadpacket, op, op_dk, schema, tags):
         op.__module__ = overloadpacket.__module__
         self.__qualname__ = self._name
         self.__annotations__ = {}
-        # NB: This name is hard-coded in torch/csrc/autograd/python_variable.cpp
-        self._dispatch_cache = {}

     # it's a no-op since OpOverload object is immutable and must be unique for a given op overload.
     def __deepcopy__(self, memo=None):
@@ -291,7 +289,6 @@ def inner(fn):
                 assert mode not in self.python_key_mode_table
                 # TODO(voz): Should we replace setting torch._C.DispatchKey.Python entirely with setting mode keys?
                 self.python_key_mode_table[mode] = fn
-                self._dispatch_cache.clear()
                 return fn

             assert isinstance(dispatch_key_or_mode, torch._C.DispatchKey)
@@ -304,19 +301,23 @@ def inner(fn):
                     f"Trying to override a python impl for {dispatch_key_or_mode} on operator {self._name}"
                 )
             self.py_kernels[dispatch_key_or_mode] = fn
-            self._dispatch_cache.clear()
             return fn

         return inner

     # This implements the pre-computation logic for the Python dispatcher.
-    def _get_dispatch(self, key):
-        # This is only called upon a cache miss
-        assert key not in self._dispatch_cache
+    def __getattr__(self, attr):
+        if len(attr) == 0 or not attr[0].isupper():
+            raise AttributeError()
+
+        try:
+            key = torch._C._dispatch_key_parse(attr)
+        except Exception as e:
+            raise AttributeError()

         if key == torch._C.DispatchKey.Python:
             if not self.python_key_mode_table:
-                self._dispatch_cache[key] = key
+                setattr(self, attr, key)
                 return key

             def handler(*args, **kwargs):
@@ -335,12 +336,12 @@ def handler(*args, **kwargs):
                 # TODO(voz): The idea behind this is that we do not yet support dispatch by key + mode, only key.
                 return self.python_key_mode_table[curr_mode](*args, **kwargs)

-            self._dispatch_cache[key] = handler
+            setattr(self, attr, handler)
             return handler

         key = resolve_key(self, key)
         r = self.py_kernels.get(key, key)
-        self._dispatch_cache[key] = r
+        setattr(self, attr, r)
         return r

     def name(self):
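
For context, the restored __getattr__ path memoizes Python-dispatcher lookups as ordinary instance attributes: the first access of a dispatch-key-named attribute computes a handler and setattr()s it onto the OpOverload, so every later access is served straight from the instance __dict__ and never re-enters __getattr__. Below is a minimal, self-contained sketch of that caching pattern under simplified assumptions; OpOverloadSketch, _KEYS, and _resolve are illustrative stand-ins, not PyTorch APIs.

# Minimal sketch of __getattr__-based memoization (illustrative only, not PyTorch code).
_KEYS = {"CPU", "CUDA", "AutogradCPU"}  # stand-in for parseable dispatch key names


class OpOverloadSketch:
    def __init__(self, name):
        self._name = name

    def _resolve(self, key):
        # Stand-in for the resolve_key()/py_kernels lookup in the real implementation.
        return lambda *args, **kwargs: f"{self._name} handled for {key}"

    def __getattr__(self, attr):
        # Only runs when normal attribute lookup fails, i.e. on a cache miss.
        # Names that do not look like dispatch keys are genuine AttributeErrors.
        if not attr or not attr[0].isupper() or attr not in _KEYS:
            raise AttributeError(attr)
        handler = self._resolve(attr)
        # Cache on the instance: the next access of op.CPU hits __dict__ directly.
        setattr(self, attr, handler)
        return handler


op = OpOverloadSketch("aten::add")
print(op.CPU())  # computed in __getattr__, then cached
print(op.CPU())  # served from the instance __dict__
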

torch/csrc/autograd/python_variable.cpp

Lines changed: 2 additions & 11 deletions
@@ -2321,20 +2321,11 @@ void ConcretePyInterpreterVTable::python_dispatcher(
     torch::jit::Stack* stack) const {
   py::gil_scoped_acquire g;
   py::handle torch_api_function_overload = getTorchApiFunction(op);
-  // TODO: if necessary, can optimize to cache the cache lookup
-  // TODO: if necessary, can optimize OpOverload to have slots
-  auto cache = py::dict(torch_api_function_overload.attr("_dispatch_cache"));
-  if (cache.ptr() == nullptr) {
-    throw python_error();
-  }

   c10::DispatchKey k = ks.highestPriorityTypeId();
-  // TODO: allow this to be non-owning
-  auto handler = py::reinterpret_borrow<py::object>(
-      PyDict_GetItem(cache.ptr(), py::cast(k).ptr()));
+  auto handler = torch_api_function_overload.attr(toString(k));
   if (handler.ptr() == nullptr) {
-    // Slow path
-    handler = torch_api_function_overload.attr("_get_dispatch")(k);
+    throw python_error();
   }
   if (py::isinstance<c10::DispatchKey>(handler)) {
     // NB: not redispatch, as that will permanently remove the python
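
On the C++ side, the restored code reaches the Python-level cache by plain attribute lookup on the OpOverload object, so a rough Python-level equivalent of the lookup above is sketched below. This is illustrative only: the _Overload class, the "CPU" key name, and the handler are stand-ins, and key_name approximates toString(ks.highestPriorityTypeId()).

# Rough Python-level equivalent of the C++ attribute lookup (illustrative only).
class _Overload:
    def __getattr__(self, attr):
        if attr == "CPU":                    # stand-in for a parseable dispatch key name
            handler = lambda *a, **kw: "ran CPU kernel"
            setattr(self, attr, handler)     # memoize, as in torch/_ops.py above
            return handler
        raise AttributeError(attr)           # corresponds to python_error() in the C++ code


op = _Overload()
key_name = "CPU"                             # stand-in for toString(ks.highestPriorityTypeId())
handler = getattr(op, key_name)              # attr(toString(k)) in the C++ code
print(handler())                             # a DispatchKey-valued handler would be redispatched instead
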

0 commit comments
