1use acir::AcirField;
2use acvm_blackbox_solver::BlackBoxFunctionSolver;
3use num_bigint::BigUint;
4
5use crate::{
6 BinaryFieldOp, BinaryIntOp, MemoryValue, NextOpcodePositionOrState, OpcodePosition, VM,
7};
8use std::collections::HashMap;
9
/// Sentinel "destination" state recorded when a traced comparison evaluates to true.
const FUZZING_COMPARISON_TRUE_STATE: usize = usize::MAX - 1;
/// Sentinel "destination" state recorded when a traced comparison evaluates to false.
const FUZZING_COMPARISON_FALSE_STATE: usize = usize::MAX;

/// Base offset for the states that encode the bit length (log2 range) of the
/// difference between comparison operands; state = base + bit length.
const FUZZING_COMPARISON_LOG_RANGE_START_STATE: usize = 0;

/// A branch feature key: the opcode position the branch starts from, paired
/// with the opcode position (or synthetic comparison state) it leads to.
pub type Branch = (OpcodePosition, NextOpcodePositionOrState);

/// Index of a unique fuzzing feature within the trace vector.
pub type UniqueFeatureIndex = usize;

/// Maps every registered branch to the index of its hit counter.
pub type BranchToFeatureMap = HashMap<Branch, UniqueFeatureIndex>;
26
/// Per-execution fuzzing coverage: a hit counter for every registered branch feature.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub(super) struct FuzzingTrace {
    // One hit counter per unique feature, indexed by the feature index.
    trace: Vec<u32>,
    // Maps (pc, destination-or-state) to the feature's index in `trace`.
    // NOTE(review): same shape as the public `BranchToFeatureMap` alias declared
    // above — consider using the alias here for consistency.
    branch_to_feature_map: HashMap<(usize, usize), usize>,
}
39
40impl FuzzingTrace {
41 fn branch_state(cond: bool) -> usize {
42 if cond { FUZZING_COMPARISON_TRUE_STATE } else { FUZZING_COMPARISON_FALSE_STATE }
43 }
44
45 fn log_range_state(log: usize) -> usize {
46 FUZZING_COMPARISON_LOG_RANGE_START_STATE + log
47 }
48
49 pub(super) fn new(branch_to_feature_map: HashMap<(usize, usize), usize>) -> Self {
50 let len = branch_to_feature_map.len();
51 Self { trace: vec![0; len], branch_to_feature_map }
52 }
53
54 fn record_branch(&mut self, pc: usize, destination: usize) {
55 let index = self.branch_to_feature_map[&(pc, destination)];
56 self.trace[index] += 1;
57 }
58
59 fn record_conditional_mov(&mut self, pc: usize, branch: bool) {
60 let index = self.branch_to_feature_map[&(pc, Self::branch_state(branch))];
61 self.trace[index] += 1;
62 }
63
64 fn record_binary_field_op_comparison<F: AcirField>(
65 &mut self,
66 pc: usize,
67 op: &BinaryFieldOp,
68 lhs: MemoryValue<F>,
69 rhs: MemoryValue<F>,
70 result: MemoryValue<F>,
71 ) {
72 match op {
73 BinaryFieldOp::Equals | BinaryFieldOp::LessThan | BinaryFieldOp::LessThanEquals => {
74 let MemoryValue::Field(a) = lhs else {
75 return;
76 };
77 let MemoryValue::Field(b) = rhs else {
78 return;
79 };
80 let MemoryValue::U1(c) = result else {
81 return;
82 };
83
84 let diff_log = BigUint::from_bytes_be(&(b - a).to_be_bytes()).bits();
86
87 let approach_index =
88 self.branch_to_feature_map[&(pc, Self::log_range_state(diff_log as usize))];
89
90 let condition_index = self.branch_to_feature_map[&(pc, Self::branch_state(c))];
91
92 self.trace[approach_index] += 1;
93 self.trace[condition_index] += 1;
94 }
95 BinaryFieldOp::Add
96 | BinaryFieldOp::Sub
97 | BinaryFieldOp::Mul
98 | BinaryFieldOp::Div
99 | BinaryFieldOp::IntegerDiv => {}
100 }
101 }
102
103 fn record_binary_int_op_comparison<F: AcirField>(
104 &mut self,
105 pc: usize,
106 op: &BinaryIntOp,
107 lhs: MemoryValue<F>,
108 rhs: MemoryValue<F>,
109 result: MemoryValue<F>,
110 ) {
111 match op {
112 BinaryIntOp::Equals | BinaryIntOp::LessThan | BinaryIntOp::LessThanEquals => {
113 let lhs_val = lhs.to_u128().expect("lhs is not an integer");
114 let rhs_val = rhs.to_u128().expect("rhs is not an integer");
115 let MemoryValue::U1(c) = result else {
116 return;
117 };
118
119 let diff_log =
120 rhs_val.abs_diff(lhs_val).checked_ilog2().map_or_else(|| 0, |x| x + 1);
121
122 let approach_index =
123 self.branch_to_feature_map[&(pc, Self::log_range_state(diff_log as usize))];
124
125 let condition_index = self.branch_to_feature_map[&(pc, Self::branch_state(c))];
126
127 self.trace[approach_index] += 1;
128 self.trace[condition_index] += 1;
129 }
130 BinaryIntOp::Add
131 | BinaryIntOp::Sub
132 | BinaryIntOp::Mul
133 | BinaryIntOp::Div
134 | BinaryIntOp::And
135 | BinaryIntOp::Or
136 | BinaryIntOp::Xor
137 | BinaryIntOp::Shl
138 | BinaryIntOp::Shr => {}
139 }
140 }
141
142 pub(super) fn get_trace(&self) -> Vec<u32> {
143 self.trace.clone()
144 }
145}
146
147impl<F: AcirField, B: BlackBoxFunctionSolver<F>> VM<'_, F, B> {
148 pub(super) fn fuzzing_trace_binary_field_op_comparison(
150 &mut self,
151 op: &BinaryFieldOp,
152 lhs: MemoryValue<F>,
153 rhs: MemoryValue<F>,
154 result: MemoryValue<F>,
155 ) {
156 if let Some(ref mut trace) = self.fuzzing_trace {
157 trace.record_binary_field_op_comparison(self.program_counter, op, lhs, rhs, result);
158 }
159 }
160
161 pub(super) fn fuzzing_trace_binary_int_op_comparison(
163 &mut self,
164 op: &BinaryIntOp,
165 lhs: MemoryValue<F>,
166 rhs: MemoryValue<F>,
167 result: MemoryValue<F>,
168 ) {
169 if let Some(ref mut trace) = self.fuzzing_trace {
170 trace.record_binary_int_op_comparison(self.program_counter, op, lhs, rhs, result);
171 }
172 }
173
174 pub(super) fn fuzzing_trace_branching(&mut self, destination: NextOpcodePositionOrState) {
176 if let Some(ref mut trace) = self.fuzzing_trace {
177 trace.record_branch(self.program_counter, destination);
178 }
179 }
180
181 pub(super) fn fuzzing_trace_conditional_mov(&mut self, branch: bool) {
183 if let Some(ref mut trace) = self.fuzzing_trace {
184 trace.record_conditional_mov(self.program_counter, branch);
185 }
186 }
187}