-
Notifications
You must be signed in to change notification settings - Fork 360
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
fix: Remove input aliasing of builtin ops (#2276)
- Loading branch information
Showing
10 changed files
with
238 additions
and
75 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
38 changes: 38 additions & 0 deletions
38
py/torch_tensorrt/dynamo/lowering/_repair_input_aliasing.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
import logging | ||
|
||
import torch | ||
from torch_tensorrt.dynamo.lowering.passes.pass_utils import get_tensor_placeholders | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
def repair_input_aliasing(gm: torch.fx.GraphModule) -> torch.fx.GraphModule:
    """Temporarily guard every Tensor placeholder with an `aten.clone`.

    Workaround for https://github.com/pytorch/pytorch/issues/108079 — the
    clones prevent downstream ops from aliasing or mutating graph inputs
    directly. They are stripped again by `remove_input_alias_fixing_clones`
    once tracing has completed.
    """
    # Only Tensor-typed placeholders need the aliasing guard
    tensor_inputs = get_tensor_placeholders(gm)

    for placeholder in tensor_inputs:
        # Insert each clone after the *last* placeholder so that all
        # placeholder nodes stay contiguous at the top of the graph
        with gm.graph.inserting_after(tensor_inputs[-1]):
            guard_clone = gm.graph.call_function(
                torch.ops.aten.clone.default,
                args=(placeholder,),
            )

        # Redirect every consumer of the placeholder — except the clone
        # itself — to read from the cloned value instead
        placeholder.replace_all_uses_with(
            guard_clone,
            delete_user_cb=lambda user: user != guard_clone,
        )

    gm.graph.lint()
    gm.recompile()
    logger.debug(f"Inserted auxiliary clone nodes for placeholders:\n{gm.graph}")

    return gm
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
from typing import List | ||
|
||
import torch | ||
|
||
|
||
def clean_up_graph_after_modifications(
    gm: torch.fx.GraphModule,
) -> torch.fx.GraphModule:
    """Run dead-code elimination, lint, and recompile on `gm` in place.

    Returns the same GraphModule for call-chaining convenience.
    """
    graph = gm.graph
    graph.eliminate_dead_code()
    graph.lint()
    gm.recompile()
    return gm
|
||
|
||
def get_tensor_placeholders(
    gm: torch.fx.GraphModule,
) -> List[torch.fx.Node]:
    """Return the placeholder nodes of `gm` annotated as torch.Tensor types."""

    def _is_tensor_placeholder(candidate: torch.fx.Node) -> bool:
        # A placeholder qualifies only when its `.type` annotation is a real
        # class object deriving from torch.Tensor (the annotation may also be
        # None or a typing construct, which isinstance(..., type) rejects)
        return (
            candidate.op == "placeholder"
            and isinstance(candidate.type, type)
            and issubclass(candidate.type, torch.Tensor)
        )

    return [node for node in gm.graph.nodes if _is_tensor_placeholder(node)]
43 changes: 43 additions & 0 deletions
43
py/torch_tensorrt/dynamo/lowering/passes/remove_input_alias_fixing_clones.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
import logging | ||
|
||
import torch | ||
from torch_tensorrt.dynamo.lowering.passes.pass_utils import ( | ||
clean_up_graph_after_modifications, | ||
) | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
# TODO: Delete this lowering pass once aot_export_joint_simple is patched
def remove_input_alias_fixing_clones(gm: torch.fx.GraphModule) -> torch.fx.GraphModule:
    """Strip the auxiliary clone nodes inserted to fix input aliasing.

    Undoes `repair_input_aliasing`.
    See: https://github.com/pytorch/pytorch/issues/108079
    """
    graph_was_changed = False

    for placeholder in gm.graph.nodes:
        if placeholder.op != "placeholder":
            continue

        # The alias-fixing pass leaves each placeholder with exactly one
        # user: an aten.clone call. Any other shape means the placeholder
        # was not modified by the compiler and must be left alone.
        users = list(placeholder.users)
        if len(users) != 1 or users[0].target != torch.ops.aten.clone.default:
            continue

        graph_was_changed = True

        # Reroute the clone's consumers back to the placeholder, then drop it
        clone_node = users[0]
        logger.debug(
            f"Removing node {clone_node} from graph, since it is a clone node which "
            f"is the only user of placeholder {placeholder} and was inserted by the compiler."
        )
        clone_node.replace_all_uses_with(placeholder)
        gm.graph.erase_node(clone_node)

    if graph_was_changed:
        gm = clean_up_graph_after_modifications(gm)
        logger.debug(f"Removed auxiliary clone nodes for placeholders:\n{gm.graph}")

    return gm
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.