@@ -62,15 +62,6 @@ struct OptRes {
6262 container::vector<uint16_t > subnets;
6363};
6464
65- // constexpr auto cse_hasher = [](TNMeta const& data) -> size_t {
66- // return data.hash_value();
67- // };
68-
69- // constexpr auto cse_equal = [](TNMeta const& left,
70- // TNMeta const& right) -> bool {
71- // return bliss::ConstGraphCmp::cmp(*left.graph, *right.graph) == 0;
72- // };
73-
7465struct SubNetHash {
7566 size_t operator ()(
7667 TensorNetwork::SlotCanonicalizationMetadata const & data) const noexcept {
@@ -95,30 +86,35 @@ struct SubNetEqual {
9586// /
9687// / \tparam CostFn A function object type that computes the cost of a single
9788// / binary contraction.
98- // / Expected signature: \code double(meta::range_of<Index> auto
99- // / const& lhs, meta::range_of<Index> auto const& rhs,
100- // / meta::range_of<Index> auto const& res) \endcode
89+ // / Expected signature:
90+ // / \code double(meta::range_of<Index> auto const& lhs,
91+ // /             meta::range_of<Index> auto const& rhs,
92+ // /             meta::range_of<Index> auto const& res)
93+ // / \endcode
10194// /
10295// / \param network The \ref TensorNetwork containing the tensors to be
103- // / contracted. \param tidxs The set of indices that should remain open in the
104- // / final result. \param cost_fn The cost model used to evaluate contractions
105- // / (e.g., flop count). \param subnet_cse If true, enables Common Subexpression
96+ // / contracted.
97+ // / \param tidxs The set of indices that should remain open in the
98+ // / final result.
99+ // / \param cost_fn The cost model used to evaluate contractions
100+ // / (e.g., flop count).
101+ // / \param subnet_cse If true, enables Common Subexpression
106102// / Elimination (CSE) for
107- // / equivalent subnetworks. When enabled, the cost of
108- // / evaluating structurally identical subnetworks is counted
109- // / only once in the total cost of a contraction tree.
110- // / Equivalence is determined by canonicalizing the subnetwork
111- // / graph.
103+ // / equivalent subnetworks. When enabled, the cost of
104+ // / evaluating structurally identical subnetworks is counted
105+ // / only once in the total cost of a contraction tree.
106+ // / Equivalence is determined by canonicalizing the subnetwork
107+ // / graph.
112108// /
113109// / \return An \ref EvalSequence representing the optimal contraction order.
114110// /
115111// / \details The optimization uses a bitmask-based dynamic programming approach
116- // / where each state represents a subnetwork (subset of tensors).
117- // / If \p subnet_cse is enabled, the algorithm precomputes canonical
118- // / metadata for every possible subnetwork to identify common
119- // / structures. This allows it to find trees that benefit from reusing
120- // / intermediate results, which is particularly effective for
121- // / expressions with repeating tensor patterns.
112+ // / where each state represents a subnetwork (subset of tensors).
113+ // / If \p subnet_cse is enabled, the algorithm precomputes canonical
114+ // / metadata for every possible subnetwork to identify common
115+ // / structures. This allows it to find trees that benefit from reusing
116+ // / intermediate results, which is particularly effective for
117+ // / expressions with repeating tensor patterns.
122118// /
123119template <typename CostFn>
124120 requires requires (CostFn&& fn, decltype (OptRes::indices) const & ixs) {
0 commit comments