[Graph Optimization] Add full_cuda_graph to control subgraph split (#6027)

This commit is contained in:
Ryan
2026-01-14 11:43:59 +08:00
committed by GitHub
parent 456637002d
commit 0d1a5e70bc
6 changed files with 21 additions and 1 deletion
@@ -164,6 +164,20 @@ class AppendAttentionBackend(AttentionBackend):
self.rank, self.device_id = init_rank_and_device_id(fd_config)
self.use_output = not fd_config.graph_opt_config.full_cuda_graph
if self.use_output:
flag = "FLAGS_cuda_graph_blacklist"
paddle.set_flags(
{
flag: ",".join(
list(
set(
paddle.get_flags(flag)[flag].split(",")
+ ["custom_op.static_op_append_attention_with_output_"]
)
)
)
}
)
self.fd_config = fd_config
def init_attention_metadata(self, forward_meta: ForwardMeta):