// problemreductions/rules/expectedretrievalcost_ilp.rs
//! Reduction from ExpectedRetrievalCost to ILP (Integer Linear Programming).
//!
//! The expected retrieval cost objective is quadratic in the assignment variables,
//! so McCormick linearization is used to produce a binary ILP:
//!
//! Variables:
//! - x_{r,s}: binary, record r placed in sector s (index: r * num_sectors + s)
//! - z_{r,s,r',s'}: binary product linearization for x_{r,s} * x_{r',s'} (index after x vars)
//!
//! Constraints:
//! - Assignment: Σ_s x_{r,s} = 1 for each r
//! - McCormick for each (r,s,r',s') product:
//!   z ≤ x_{r,s}, z ≤ x_{r',s'}, z ≥ x_{r,s} + x_{r',s'} - 1
//!
//! Objective: Minimize Σ_{r,s,r',s'} lat(s,s') * p_r * p_{r'} * z_{r,s,r',s'}
17use crate::models::algebraic::{LinearConstraint, ObjectiveSense, ILP};
18use crate::models::misc::ExpectedRetrievalCost;
19use crate::reduction;
20use crate::rules::traits::{ReduceTo, ReductionResult};
21
/// Latency (in sectors) from `source` to `target` on a circular device.
///
/// Counts the sectors strictly between `source` and `target` (the source
/// sector itself is excluded), wrapping around the end of the device.
/// This matches the `latency_distance` function in the model.
fn latency_distance(num_sectors: usize, source: usize, target: usize) -> usize {
    // Forward steps needed to reach `target`, wrapping past the last sector
    // when the target lies at or behind the source.
    let steps = if target > source {
        target - source
    } else {
        num_sectors + target - source
    };
    // Exclude the source sector from the count.
    steps - 1
}
33
/// Result of reducing ExpectedRetrievalCost to ILP.
///
/// Variable layout:
/// - x_{r,s} at index r * num_sectors + s  (0..num_records * num_sectors)
/// - z_{r,s,r',s'} at index num_records*num_sectors + (r * num_sectors + s) * (num_records * num_sectors) + (r' * num_sectors + s')
///
/// Total: num_records * num_sectors + (num_records * num_sectors)^2 variables.
#[derive(Debug, Clone)]
pub struct ReductionERCToILP {
    /// The binary ILP produced by the reduction (McCormick-linearized objective).
    target: ILP<bool>,
    /// Number of records in the source problem; needed to decode a target solution.
    num_records: usize,
    /// Number of sectors on the circular device; stride of the x-variable layout.
    num_sectors: usize,
}
47
48impl ReductionERCToILP {
49    fn x_var(&self, r: usize, s: usize) -> usize {
50        r * self.num_sectors + s
51    }
52
53    fn z_var(&self, r: usize, s: usize, r2: usize, s2: usize) -> usize {
54        let n = self.num_records * self.num_sectors;
55        n + (r * self.num_sectors + s) * n + (r2 * self.num_sectors + s2)
56    }
57}
58
59impl ReductionResult for ReductionERCToILP {
60    type Source = ExpectedRetrievalCost;
61    type Target = ILP<bool>;
62
63    fn target_problem(&self) -> &ILP<bool> {
64        &self.target
65    }
66
67    /// Extract solution: for each record r, find the unique sector s where x_{r,s} = 1.
68    fn extract_solution(&self, target_solution: &[usize]) -> Vec<usize> {
69        let num_sectors = self.num_sectors;
70        (0..self.num_records)
71            .map(|r| {
72                (0..num_sectors)
73                    .find(|&s| {
74                        let idx = r * num_sectors + s;
75                        idx < target_solution.len() && target_solution[idx] == 1
76                    })
77                    .unwrap_or(0)
78            })
79            .collect()
80    }
81}
82
83#[reduction(
84    overhead = {
85        num_vars = "num_records * num_sectors + num_records^2 * num_sectors^2",
86        num_constraints = "num_records + 3 * num_records^2 * num_sectors^2",
87    }
88)]
89impl ReduceTo<ILP<bool>> for ExpectedRetrievalCost {
90    type Result = ReductionERCToILP;
91
92    fn reduce_to(&self) -> Self::Result {
93        let num_records = self.num_records();
94        let num_sectors = self.num_sectors();
95        let n = num_records * num_sectors; // total x variables
96        let num_vars = n + n * n;
97
98        let result = ReductionERCToILP {
99            target: ILP::empty(),
100            num_records,
101            num_sectors,
102        };
103
104        let mut constraints = Vec::new();
105
106        // Assignment constraints: for each record r, Σ_s x_{r,s} = 1
107        for r in 0..num_records {
108            let terms: Vec<(usize, f64)> = (0..num_sectors)
109                .map(|s| (result.x_var(r, s), 1.0))
110                .collect();
111            constraints.push(LinearConstraint::eq(terms, 1.0));
112        }
113
114        // McCormick linearization constraints for each product z_{r,s,r',s'}
115        // z ≤ x_{r,s}      →  z - x_{r,s} ≤ 0
116        // z ≤ x_{r',s'}    →  z - x_{r',s'} ≤ 0
117        // z ≥ x_{r,s} + x_{r',s'} - 1  →  -z + x_{r,s} + x_{r',s'} ≤ 1
118        for r in 0..num_records {
119            for s in 0..num_sectors {
120                for r2 in 0..num_records {
121                    for s2 in 0..num_sectors {
122                        let z = result.z_var(r, s, r2, s2);
123                        let x1 = result.x_var(r, s);
124                        let x2 = result.x_var(r2, s2);
125
126                        // z ≤ x_{r,s}: z - x_{r,s} ≤ 0
127                        constraints.push(LinearConstraint::le(vec![(z, 1.0), (x1, -1.0)], 0.0));
128                        // z ≤ x_{r',s'}: z - x_{r',s'} ≤ 0
129                        constraints.push(LinearConstraint::le(vec![(z, 1.0), (x2, -1.0)], 0.0));
130                        // z ≥ x_{r,s} + x_{r',s'} - 1: -z + x_{r,s} + x_{r',s'} ≤ 1
131                        constraints.push(LinearConstraint::le(
132                            vec![(z, -1.0), (x1, 1.0), (x2, 1.0)],
133                            1.0,
134                        ));
135                    }
136                }
137            }
138        }
139
140        // Objective: Minimize Σ_{r,s,r',s'} lat(s,s') * p_r * p_{r'} * z_{r,s,r',s'}
141        let probabilities = self.probabilities();
142        let mut objective: Vec<(usize, f64)> = Vec::new();
143        for r in 0..num_records {
144            for s in 0..num_sectors {
145                for r2 in 0..num_records {
146                    for s2 in 0..num_sectors {
147                        let lat = latency_distance(num_sectors, s, s2) as f64;
148                        if lat > 0.0 {
149                            let coeff = lat * probabilities[r] * probabilities[r2];
150                            if coeff.abs() > 0.0 {
151                                let z = result.z_var(r, s, r2, s2);
152                                objective.push((z, coeff));
153                            }
154                        }
155                    }
156                }
157            }
158        }
159
160        let target = ILP::new(num_vars, constraints, objective, ObjectiveSense::Minimize);
161
162        ReductionERCToILP {
163            target,
164            num_records,
165            num_sectors,
166        }
167    }
168}
169
#[cfg(feature = "example-db")]
pub(crate) fn canonical_rule_example_specs() -> Vec<crate::example_db::specs::RuleExampleSpec> {
    use crate::export::SolutionPair;

    let spec = crate::example_db::specs::RuleExampleSpec {
        id: "expectedretrievalcost_to_ilp",
        build: || {
            // Two equally likely records (p = 0.5 each) on a two-sector device;
            // the expected witness places record 0 in sector 0 and record 1 in sector 1.
            let source = ExpectedRetrievalCost::new(vec![0.5, 0.5], 2);
            // Solve the reduced ILP so the stored target_config matches solver output.
            let red: ReductionERCToILP = ReduceTo::<ILP<bool>>::reduce_to(&source);
            let target_config = crate::solvers::ILPSolver::new()
                .solve(red.target_problem())
                .expect("canonical example should be feasible");
            let pair = SolutionPair {
                source_config: vec![0, 1],
                target_config,
            };
            crate::example_db::specs::rule_example_with_witness::<_, ILP<bool>>(source, pair)
        },
    };
    vec![spec]
}
196
// Unit tests are kept out-of-line; the `path` attribute pulls them in from
// the shared unit_tests directory.
#[cfg(test)]
#[path = "../unit_tests/rules/expectedretrievalcost_ilp.rs"]
mod tests;