PandA-2024.02
Bit_Value_forward.cpp
1 /*
2  *
3  * _/_/_/ _/_/ _/ _/ _/_/_/ _/_/
4  * _/ _/ _/ _/ _/_/ _/ _/ _/ _/ _/
5  * _/_/_/ _/_/_/_/ _/ _/_/ _/ _/ _/_/_/_/
6  * _/ _/ _/ _/ _/ _/ _/ _/ _/
7  * _/ _/ _/ _/ _/ _/_/_/ _/ _/
8  *
9  * ***********************************************
10  * PandA Project
11  * URL: http://panda.dei.polimi.it
12  * Politecnico di Milano - DEIB
13  * System Architectures Group
14  * ***********************************************
15  * Copyright (C) 2004-2024 Politecnico di Milano
16  *
17  * This file is part of the PandA framework.
18  *
19  * The PandA framework is free software; you can redistribute it and/or modify
20  * it under the terms of the GNU General Public License as published by
21  * the Free Software Foundation; either version 3 of the License, or
22  * (at your option) any later version.
23  *
24  * This program is distributed in the hope that it will be useful,
25  * but WITHOUT ANY WARRANTY; without even the implied warranty of
26  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27  * GNU General Public License for more details.
28  *
29  * You should have received a copy of the GNU General Public License
30  * along with this program. If not, see <http://www.gnu.org/licenses/>.
31  *
32  */
40 // include class header
41 #include "Bit_Value.hpp"
42 
43 #include "Dominance.hpp"
44 #include "Parameter.hpp"
45 #include "basic_block.hpp"
46 #include "call_graph_manager.hpp"
47 #include "function_behavior.hpp"
48 
49 // include from tree/
50 #include "Range.hpp"
51 #include "application_manager.hpp"
52 #include "tree_basic_block.hpp"
53 #include "tree_helper.hpp"
54 #include "tree_manager.hpp"
55 #include "tree_node.hpp"
56 #include "tree_reindex.hpp"
57 
58 #include "dbgPrintHelper.hpp" // for DEBUG_LEVEL_
59 #include "string_manipulation.hpp"
60 #include <boost/range/adaptors.hpp>
61 #include <unordered_set>
62 
63 std::deque<bit_lattice> Bit_Value::get_current(const tree_nodeConstRef& tn) const
64 {
65  const auto nid = GET_INDEX_CONST_NODE(tn);
66  const auto node = GET_CONST_NODE(tn);
67  if(node->get_kind() == ssa_name_K || node->get_kind() == parm_decl_K)
68  {
69  THROW_ASSERT(current.count(nid), "");
70  return current.at(nid);
71  }
72  else if(GetPointer<const cst_node>(node))
73  {
74  THROW_ASSERT(best.count(nid), "");
75  return best.at(nid);
76  }
77  THROW_UNREACHABLE("Unexpected node kind: " + node->get_kind_text());
78  return std::deque<bit_lattice>();
79 }
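// Note on the representation used throughout this file: a value is abstracted as a
// std::deque<bit_lattice> with the most significant bit at front() and the least
// significant bit at back(); each element is ZERO, ONE, U (unknown) or X (don't care).
// For instance, an unsigned 6 over four bits corresponds to <0,1,1,0>, while
// create_u_bitstring(3) yields <U,U,U>. A minimal illustrative sketch (not part of the
// original source), assuming only the bit_lattice helpers already used in this file:
//
//   std::deque<bit_lattice> six{bit_lattice::ZERO, bit_lattice::ONE,
//                               bit_lattice::ONE, bit_lattice::ZERO};
//   // bitstring_to_string(six) would render roughly as "0110"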
80 
 81  void Bit_Value::forward()
 82  {
83  std::deque<tree_nodeConstRef> working_list, return_list;
84  CustomUnorderedSet<unsigned int> working_list_idx;
85  const auto push_back = [&](const tree_nodeConstRef& stmt) {
86  const auto stmt_kind = stmt->get_kind();
87  if(!working_list_idx.count(stmt->index) && (stmt_kind == gimple_assign_K || stmt_kind == gimple_phi_K ||
88  stmt_kind == gimple_return_K || stmt_kind == gimple_asm_K))
89  {
90  working_list.push_back(stmt);
91  working_list_idx.insert(stmt->index);
92  }
93  };
94  const auto pop_front = [&]() -> tree_nodeConstRef {
95  const auto stmt = working_list.front();
96  working_list.pop_front();
97  working_list_idx.erase(stmt->index);
98  return stmt;
99  };
100  for(const auto& bb : bb_topological)
101  {
102  for(const auto& phi : bb->CGetPhiList())
103  {
104  const auto phi_node = GET_CONST_NODE(phi);
105  const auto gp = GetPointerS<const gimple_phi>(phi_node);
106  if(!gp->virtual_flag)
107  {
108  if(IsHandledByBitvalue(gp->res))
109  {
110  push_back(phi_node);
111  }
112  }
113  }
114  for(const auto& stmt : bb->CGetStmtList())
115  {
116  const auto stmt_node = GET_CONST_NODE(stmt);
117  if(stmt_node->get_kind() == gimple_assign_K)
118  {
119  const auto ga = GetPointerS<const gimple_assign>(stmt_node);
120  if(IsHandledByBitvalue(ga->op0))
121  {
122  push_back(stmt_node);
123  }
124  }
125  else if(stmt_node->get_kind() == gimple_return_K)
126  {
127  return_list.push_back(stmt_node);
128  }
129  }
130  }
131 
132  const auto is_root_function = AppM->CGetCallGraphManager()->GetRootFunctions().count(function_id);
133  while(!working_list.empty())
134  {
135  const auto stmt_node = pop_front();
136  const auto stmt_kind = stmt_node->get_kind();
137  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "-->Analyzing " + STR(stmt_node));
138  if(stmt_kind == gimple_assign_K)
139  {
140  const auto ga = GetPointerS<const gimple_assign>(stmt_node);
141  const auto output_nid = GET_INDEX_CONST_NODE(ga->op0);
142  const auto ssa = GetPointer<const ssa_name>(GET_CONST_NODE(ga->op0));
143  if(ssa)
144  {
145  if(!IsHandledByBitvalue(ga->op0) || ssa->CGetUseStmts().empty())
146  {
 147  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 148  "<--variable " + STR(ssa) + " of type " + STR(tree_helper::CGetType(ga->op0)) +
149  " not considered");
150  continue;
151  }
152  bool hasRequiredValues = true;
153  std::vector<std::tuple<unsigned int, unsigned int>> vars_read;
154  tree_helper::get_required_values(vars_read, TM->GetTreeNode(stmt_node->index));
155  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "---Requires " + STR(vars_read.size()) + " values");
156  for(const auto& var_pair : vars_read)
157  {
158  const auto use_node_id = std::get<0>(var_pair);
159  if(use_node_id == 0)
160  {
161  continue;
162  }
163  const auto use_node = TM->CGetTreeNode(use_node_id);
164  if(!IsHandledByBitvalue(use_node))
165  {
166  continue;
167  }
168  const auto ssa_use = GetPointer<const ssa_name>(use_node);
169 
170  if(ssa_use && !current.count(use_node_id))
171  {
172  const auto def_stmt = ssa_use->CGetDefStmt();
173  if(GET_CONST_NODE(def_stmt)->get_kind() == gimple_nop_K)
174  {
175  THROW_ASSERT(!ssa_use->var || GET_CONST_NODE(ssa_use->var)->get_kind() != parm_decl_K,
176  "Function parameter bitvalue must be defined before");
177  current.insert(std::make_pair(
 178  use_node_id, create_bitstring_from_constant(0, BitLatticeManipulator::Size(use_node),
 179  IsSignedIntegerType(use_node))));
180  }
181  else
182  {
 183  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 184  "---Input " + ssa_use->ToString() + " definition has not been analyzed yet");
 185  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 186  "---Definition statement added to queue: " + GET_CONST_NODE(def_stmt)->ToString());
187  push_back(GET_CONST_NODE(def_stmt));
188  hasRequiredValues = false;
189  continue;
190  }
191  }
192  }
193  if(hasRequiredValues)
194  {
195  auto res = forward_transfer(ga);
196  current.insert(std::make_pair(output_nid, best.at(output_nid)));
197  if(update_current(res, ga->op0))
198  {
200  "---current updated: " + bitstring_to_string(current.at(output_nid)));
201  for(const auto& next_node : ssa->CGetUseStmts())
202  {
203  push_back(GET_CONST_NODE(next_node.first));
204  }
205  }
206  }
207  else
208  {
 209  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 210  "---Inputs not fully analyzed by the forward Bit Value Analysis. Operation " +
211  GET_NODE(ga->op0)->ToString() + " postponed");
212  push_back(stmt_node);
213  }
214  }
215  }
216  else if(stmt_kind == gimple_phi_K)
217  {
218  const auto gp = GetPointerS<const gimple_phi>(stmt_node);
219  THROW_ASSERT(!gp->virtual_flag, "unexpected case");
220 
221  const auto output_nid = GET_INDEX_CONST_NODE(gp->res);
222  const auto ssa = GetPointerS<const ssa_name>(GET_CONST_NODE(gp->res));
223  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "phi: " + STR(stmt_node->index));
224  if(!IsHandledByBitvalue(gp->res) || ssa->CGetUseStmts().empty())
225  {
 226  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 227  "<--variable " + STR(ssa) + " of type " + STR(tree_helper::CGetType(gp->res)) +
228  " not considered");
229  continue;
230  }
231 
232  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "res id: " + STR(output_nid));
233  auto res = create_x_bitstring(1);
234  bool atLeastOne = false;
235  bool allInputs = true;
236 #if HAVE_ASSERTS
237  const auto is_signed1 = tree_helper::IsSignedIntegerType(gp->res);
238 #endif
239  for(const auto& def_edge : gp->CGetDefEdgesList())
240  {
241  const auto def_id = GET_INDEX_CONST_NODE(def_edge.first);
242  if(def_id == output_nid)
243  {
 244  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 245  "---Skipping " + STR(def_edge.first) + " coming from BB" + STR(def_edge.second) +
246  " because of ssa cycle");
247  continue;
248  }
249  if(current.find(def_id) == current.end())
250  {
251  const auto def_node = GET_CONST_NODE(def_edge.first);
252  const auto def_ssa = GetPointer<const ssa_name>(def_node);
253  if(def_ssa)
254  {
255  const auto def_stmt = def_ssa->CGetDefStmt();
256  if(GET_CONST_NODE(def_stmt)->get_kind() == gimple_nop_K)
257  {
258  THROW_ASSERT(!def_ssa->var || GET_CONST_NODE(def_ssa->var)->get_kind() != parm_decl_K,
259  "Function parameter bitvalue must be defined before");
260  current.insert(
261  std::make_pair(def_id, create_bitstring_from_constant(0, BitLatticeManipulator::Size(def_node),
262  IsSignedIntegerType(def_edge.first))));
263  }
264  else
265  {
 266  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 267  "---Skipping " + STR(def_node) +
268  " no current has been yet computed for this ssa var");
 269  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 270  "---Definition statement added to queue: " + GET_CONST_NODE(def_stmt)->ToString());
271  push_back(GET_CONST_NODE(def_stmt));
272  allInputs = false;
273  continue;
274  }
275  }
276  else
277  {
278  THROW_ASSERT(best.find(def_id) != best.end(), "unexpected condition");
279  current.insert(std::make_pair(def_id, best.at(def_id)));
280  }
281  }
282  atLeastOne = true;
 283  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 284  "---Edge " + STR(def_edge.second) + ": " + bitstring_to_string(current.at(def_id)));
285 
286 #if HAVE_ASSERTS
287  const auto is_signed2 = IsSignedIntegerType(def_edge.first);
288 #endif
289  THROW_ASSERT(is_signed2 == is_signed1, STR(stmt_node));
290  res = inf(res, current.at(def_id), gp->res);
292  }
293  if(atLeastOne)
294  {
296  const auto ins = current.insert(std::make_pair(output_nid, res));
297  auto current_updated = ins.second;
298  if(!current_updated)
299  {
300  current_updated = update_current(res, gp->res);
301  }
302  if(current_updated)
303  {
 304  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 305  "---current updated: " + bitstring_to_string(current.at(output_nid)));
306  for(const auto& next_node : ssa->CGetUseStmts())
307  {
308  push_back(GET_NODE(next_node.first));
309  }
310  }
311  }
312  if(!allInputs)
313  {
314  push_back(stmt_node);
315  }
316  }
317  else if(stmt_kind == gimple_return_K)
318  {
319  if(!is_root_function)
320  {
321  const auto gr = GetPointerS<const gimple_return>(stmt_node);
322  THROW_ASSERT(gr->op, "Empty return should not be a use of any ssa");
323  if(GET_CONST_NODE(gr->op)->get_kind() == ssa_name_K && IsHandledByBitvalue(gr->op))
324  {
325  const auto res = get_current(gr->op);
326  THROW_ASSERT(res.size(), "");
328  auto& output_current = current[function_id];
329  output_current = output_current.size() ? inf(res, output_current, function_id) : res;
330  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "---inf: " + bitstring_to_string(output_current));
331  }
332  }
333  }
334  else if(stmt_kind == gimple_asm_K)
335  {
336  const auto ga = GetPointerS<const gimple_asm>(stmt_node);
337  if(ga->out)
338  {
339  auto tl = GetPointer<const tree_list>(GET_CONST_NODE(ga->out));
340  THROW_ASSERT(tl->valu, "only the first output and so only single output gimple_asm are supported");
341  const auto ssa = GetPointer<const ssa_name>(GET_CONST_NODE(tl->valu));
342  if(ssa && !ssa->CGetUseStmts().empty() && IsHandledByBitvalue(tl->valu))
343  {
344  const auto output_nid = GET_INDEX_CONST_NODE(tl->valu);
345  THROW_ASSERT(best.count(output_nid), "");
346  current[output_nid] = best.at(output_nid);
 347  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
 348  "---current updated: " + bitstring_to_string(current.at(output_nid)));
349  }
350  }
351  }
352  else
353  {
354  THROW_UNREACHABLE("Unhandled statement: " + STR(stmt_node) + "(" + tree_node::GetString(stmt_kind) + ")");
355  }
356  INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Analyzed " + STR(stmt_node));
357  }
 358 // Update current to perform sup with best after all return statements' bitvalues have been propagated
359  if(current.count(function_id))
360  {
361  update_current(current.at(function_id), TM->CGetTreeNode(function_id));
362  }
363 }
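// The loop above is a standard forward data-flow fixpoint. A minimal sketch of the
// same pattern (illustrative only, hypothetical names, not part of the original file):
//
//   seed the working list with every handled gimple_assign / gimple_phi;
//   while(!working_list.empty())
//   {
//      stmt = pop_front();
//      if(some input ssa still has no "current" bitstring)
//      {
//         push_back(defining statement of that ssa);   // analyze the input first
//         push_back(stmt);                             // and postpone this statement
//      }
//      else if(update_current(forward_transfer(stmt), output))
//      {
//         for(use : uses of output) push_back(use);    // propagate the tightened value
//      }
//   }
//
// Termination relies on the finiteness of the bit lattice: update_current (a sup with
// best) can change the bitstring of each ssa only a bounded number of times.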
364 
365 std::deque<bit_lattice> Bit_Value::forward_transfer(const gimple_assign* ga) const
366 {
367  std::deque<bit_lattice> res;
368  const auto& lhs = ga->op0;
369  const auto& rhs = ga->op1;
370  const auto lhs_signed = IsSignedIntegerType(lhs);
371  const auto lhs_size = BitLatticeManipulator::Size(lhs);
372  const auto rhs_kind = GET_CONST_NODE(rhs)->get_kind();
373  switch(rhs_kind)
374  {
375  case ssa_name_K:
376  case integer_cst_K:
377  {
378  res = get_current(rhs);
379  break;
380  }
381  case addr_expr_K:
382  {
383  const auto ae = GetPointerS<const addr_expr>(GET_CONST_NODE(rhs));
384  const auto address_size = AppM->get_address_bitsize();
385  const auto is_pretty_print_used =
386  parameters->isOption(OPT_pretty_print) ||
387  (parameters->isOption(OPT_discrepancy) && parameters->getOption<bool>(OPT_discrepancy));
388  const auto lt0 = lsb_to_zero(ae, is_pretty_print_used);
 389  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 390  "---address_size: " + STR(address_size) + " lt0: " + STR(lt0));
391  if(lt0 && address_size > lt0)
392  {
393  res = create_u_bitstring(address_size - lt0);
394  for(auto index = 0u; index < lt0; ++index)
395  {
396  res.push_back(bit_lattice::ZERO);
397  }
398  }
399  break;
400  }
401  case abs_expr_K:
402  case bit_not_expr_K:
403  case convert_expr_K:
404  case negate_expr_K:
405  case nop_expr_K:
406  case truth_not_expr_K:
407  case view_convert_expr_K:
408  {
409  const auto operation = GetPointer<const unary_expr>(GET_CONST_NODE(rhs));
410 
 411  if(!IsHandledByBitvalue(operation->op))
 412  {
 413  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 414  "---operand " + STR(operation->op) + " of type " +
415  STR(tree_helper::CGetType(operation->op)) + " not handled by bitvalue");
416  res = create_u_bitstring(lhs_size);
417  break;
418  }
419  const auto op_signed = IsSignedIntegerType(operation->op);
420  auto op_bitstring = get_current(operation->op);
 421  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 422  "---forward_transfer, operand(" + STR(GET_INDEX_NODE(operation->op)) +
423  "): " + bitstring_to_string(op_bitstring));
425 
426  if(rhs_kind == abs_expr_K)
427  {
428  const auto op_size = BitLatticeManipulator::Size(operation->op);
429  const auto sign_bit = op_bitstring.front();
430  switch(sign_bit)
431  {
432  case bit_lattice::ZERO:
433  {
434  res = op_bitstring;
435  break;
436  }
437  case bit_lattice::ONE:
438  {
 439  auto borrow = bit_lattice::ZERO;
 440  for(const auto bit : boost::adaptors::reverse(op_bitstring))
441  {
442  const auto borrow_and_bit_pair = minus_expr_map.at(bit_lattice::ZERO).at(bit).at(borrow);
443  res.push_front(borrow_and_bit_pair.back());
444  borrow = borrow_and_bit_pair.front();
445  }
446  if(res.size() < op_size)
447  {
448  res.push_front(minus_expr_map.at(bit_lattice::ZERO).at(op_bitstring.front()).at(borrow).back());
449  }
450  break;
451  }
452  case bit_lattice::X:
453  case bit_lattice::U:
454  {
455  std::deque<bit_lattice> negated_bitstring;
 456  auto borrow = bit_lattice::ZERO;
 457  for(const auto& bit : boost::adaptors::reverse(op_bitstring))
458  {
459  const auto borrow_and_bit_pair = minus_expr_map.at(bit_lattice::ZERO).at(bit).at(borrow);
460  negated_bitstring.push_front(borrow_and_bit_pair.back());
461  borrow = borrow_and_bit_pair.front();
462  }
463  if(negated_bitstring.size() < op_size)
464  {
465  negated_bitstring.push_front(
466  minus_expr_map.at(bit_lattice::ZERO).at(op_bitstring.front()).at(borrow).back());
467  }
468  res = inf(op_bitstring, negated_bitstring, lhs);
469  break;
470  }
471  default:
472  {
473  THROW_UNREACHABLE("unexpected bit lattice for sign bit " + bitstring_to_string(op_bitstring));
474  break;
475  }
476  }
477  THROW_ASSERT(lhs_signed && op_signed, "lhs and rhs of an abs_expr must be signed");
478  }
479  else if(rhs_kind == bit_not_expr_K)
480  {
481  if(op_bitstring.size() == 1 && op_bitstring.at(0) == bit_lattice::X &&
482  !tree_helper::IsBooleanType(operation->op) && !op_signed)
483  {
484  op_bitstring.push_front(bit_lattice::ZERO);
485  }
486  if(op_bitstring.size() < lhs_size)
487  {
488  op_bitstring = sign_extend_bitstring(op_bitstring, op_signed, lhs_size);
489  }
490 
491  auto op_it = op_bitstring.rbegin();
492  for(unsigned bit_index = 0; bit_index < lhs_size && op_it != op_bitstring.rend(); ++op_it, ++bit_index)
493  {
494  res.push_front(bit_xor_expr_map.at(*op_it).at(bit_lattice::ONE));
495  }
496  }
497  else if(rhs_kind == convert_expr_K || rhs_kind == nop_expr_K || rhs_kind == view_convert_expr_K)
498  {
499  res = op_bitstring;
500  const auto do_not_extend = lhs_signed && lhs_size == 1 && tree_helper::IsBooleanType(operation->op);
501  if((lhs_signed != op_signed && !do_not_extend) && res.size() < lhs_size)
502  {
503  res = sign_extend_bitstring(res, op_signed, lhs_size);
504  }
505  while(res.size() > lhs_size)
506  {
507  res.pop_front();
508  }
509  }
510  else if(rhs_kind == negate_expr_K)
511  {
 512  auto borrow = bit_lattice::ZERO;
 513  if(!lhs_signed && lhs_size > op_bitstring.size())
514  {
515  op_bitstring = sign_extend_bitstring(op_bitstring, lhs_signed, lhs_size);
516  }
517  auto op_it = op_bitstring.rbegin();
518  for(unsigned bit_index = 0; bit_index < lhs_size && op_it != op_bitstring.rend(); ++op_it, ++bit_index)
519  {
520  res.push_front(minus_expr_map.at(bit_lattice::ZERO).at(*op_it).at(borrow).back());
521  borrow = minus_expr_map.at(bit_lattice::ZERO).at(*op_it).at(borrow).front();
522  }
523  if(lhs_signed && res.size() < lhs_size)
524  {
525  res.push_front(minus_expr_map.at(bit_lattice::ZERO).at(op_bitstring.front()).at(borrow).back());
526  }
527  }
528  else if(rhs_kind == truth_not_expr_K)
529  {
530  THROW_ASSERT(tree_helper::IsBooleanType(lhs) || lhs_size == 1, "");
531  bit_lattice arg_left = bit_lattice::ZERO;
532  for(auto current_bit : op_bitstring)
533  {
534  if(current_bit == bit_lattice::ONE)
535  {
536  arg_left = bit_lattice::ONE;
537  break;
538  }
539  else if(current_bit == bit_lattice::U)
540  {
541  arg_left = bit_lattice::U;
542  }
543  }
544  res.push_front(bit_xor_expr_map.at(arg_left).at(bit_lattice::ONE));
545  }
546  else
547  {
 548  THROW_UNREACHABLE("Unhandled unary expression: " + ga->ToString() + "(" + tree_node::GetString(rhs_kind) +
549  ")");
550  }
551  break;
552  }
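 // Worked example for the abs_expr branch above (illustrative only): with
 // op_bitstring <1,1,0>, i.e. the signed 3-bit value -2, the sign bit is ONE, so the
 // branch computes 0 - op through minus_expr_map, yielding <0,1,0> (+2). When the
 // sign bit is U or X the sign is not known statically, so the branch returns
 // inf(op, 0 - op), keeping only the bits on which the value and its negation agree.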
553  case bit_and_expr_K:
554  case bit_ior_expr_K:
555  case bit_xor_expr_K:
556  case eq_expr_K:
557  case exact_div_expr_K:
558  case ge_expr_K:
559  case gt_expr_K:
560  case le_expr_K:
561  case lrotate_expr_K:
562  case lshift_expr_K:
563  case lt_expr_K:
564  case max_expr_K:
565  case min_expr_K:
566  case minus_expr_K:
567  case mult_expr_K:
568  case ne_expr_K:
569  case plus_expr_K:
570  case pointer_plus_expr_K:
571  case rrotate_expr_K:
572  case rshift_expr_K:
573  case trunc_div_expr_K:
574  case trunc_mod_expr_K:
575  case truth_and_expr_K:
576  case truth_andif_expr_K:
577  case truth_or_expr_K:
578  case truth_orif_expr_K:
579  case truth_xor_expr_K:
580  case widen_mult_expr_K:
581  case extract_bit_expr_K:
582  {
583  const auto operation = GetPointerS<const binary_expr>(GET_CONST_NODE(rhs));
584 
585  if(!IsHandledByBitvalue(operation->op0))
586  {
 587  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 588  "---operand " + STR(operation->op0) + " of type " +
589  STR(tree_helper::CGetType(operation->op0)) + " not handled by bitvalue");
590  res = create_u_bitstring(lhs_size);
591  break;
592  }
593 
594  if(!IsHandledByBitvalue(operation->op1))
595  {
 596  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 597  "---operand " + STR(operation->op1) + " of type " +
598  STR(tree_helper::CGetType(operation->op1)) + " not handled by bitvalue");
599  res = create_u_bitstring(lhs_size);
600  break;
601  }
602 
603  const auto op0_signed = IsSignedIntegerType(operation->op0);
604  auto op0_bitstring = get_current(operation->op0);
605 
606  const auto op1_signed = IsSignedIntegerType(operation->op1);
607  auto op1_bitstring = get_current(operation->op1);
608 
609  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "---forward_transfer");
 610  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 611  "--- operand0(" + STR(GET_INDEX_NODE(operation->op0)) +
612  "): " + bitstring_to_string(op0_bitstring));
 613  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 614  "--- operand1(" + STR(GET_INDEX_NODE(operation->op1)) +
615  "): " + bitstring_to_string(op1_bitstring));
617 
618  if(rhs_kind == bit_and_expr_K)
619  {
620  if(lhs_size > op0_bitstring.size())
621  {
622  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
623  }
624  if(lhs_size > op1_bitstring.size())
625  {
626  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, lhs_size);
627  }
628 
629  auto op0_it = op0_bitstring.rbegin();
630  auto op1_it = op1_bitstring.rbegin();
631  for(auto bit_index = 0u;
632  bit_index < lhs_size && op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
633  ++op0_it, ++op1_it, ++bit_index)
634  {
635  res.push_front(bit_and_expr_map.at(*op0_it).at(*op1_it));
636  }
637  }
638  else if(rhs_kind == bit_ior_expr_K)
639  {
640  if(op0_bitstring.size() == 1 && op0_bitstring.at(0) == bit_lattice::X &&
641  !tree_helper::IsBooleanType(operation->op0) && !op0_signed)
642  {
643  op0_bitstring.push_front(bit_lattice::ZERO);
644  }
645  if(op1_bitstring.size() == 1 && op1_bitstring.at(0) == bit_lattice::X &&
646  !tree_helper::IsBooleanType(operation->op1) && !op1_signed)
647  {
648  op1_bitstring.push_front(bit_lattice::ZERO);
649  }
650  if(lhs_size > op1_bitstring.size())
651  {
652  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, lhs_size);
653  }
654  if(lhs_size > op0_bitstring.size())
655  {
656  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
657  }
658 
659  auto op0_it = op0_bitstring.rbegin();
660  auto op1_it = op1_bitstring.rbegin();
661  for(auto bit_index = 0u;
662  bit_index < lhs_size && op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
663  ++op0_it, ++op1_it, ++bit_index)
664  {
665  res.push_front(bit_ior_expr_map.at(*op0_it).at(*op1_it));
666  }
667  }
668  else if(rhs_kind == bit_xor_expr_K)
669  {
670  if(op0_bitstring.size() == 1 && op0_bitstring.at(0) == bit_lattice::X &&
671  !tree_helper::IsBooleanType(operation->op0) && !op0_signed)
672  {
673  op0_bitstring.push_front(bit_lattice::ZERO);
674  }
675  if(op1_bitstring.size() == 1 && op1_bitstring.at(0) == bit_lattice::X &&
676  !tree_helper::IsBooleanType(operation->op1) && !op1_signed)
677  {
678  op1_bitstring.push_front(bit_lattice::ZERO);
679  }
680 
681  if(lhs_size > op1_bitstring.size())
682  {
683  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, lhs_size);
684  }
685  if(lhs_size > op0_bitstring.size())
686  {
687  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
688  }
689 
690  auto op0_it = op0_bitstring.rbegin();
691  auto op1_it = op1_bitstring.rbegin();
692  for(auto bit_index = 0u;
693  bit_index < lhs_size && op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
694  ++op0_it, ++op1_it, ++bit_index)
695  {
696  res.push_front(bit_xor_expr_map.at(*op0_it).at(*op1_it));
697  }
698  }
699  else if(rhs_kind == eq_expr_K)
700  {
701  if(op0_bitstring.size() > op1_bitstring.size())
702  {
703  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
704  }
705  if(op1_bitstring.size() > op0_bitstring.size())
706  {
707  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
708  }
709 
710  auto op0_bitstring_it = op0_bitstring.begin();
711  auto op1_bitstring_it = op1_bitstring.begin();
712  auto computed_result = false;
713  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
714  ++op0_bitstring_it, ++op1_bitstring_it)
715  {
716  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
717  {
718  // <U> is UNKNOWN
719  res.push_front(bit_lattice::U);
720  computed_result = true;
721  break;
722  }
723  else if((*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE) ||
724  (*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO))
725  {
726  // <0> is FALSE
727  res.push_front(bit_lattice::ZERO);
728  computed_result = true;
729  break;
730  }
731  }
732  if(!computed_result)
733  {
 734  // If the result has not been computed so far, the bitstrings are equal
735  // <1> is TRUE
736  res.push_front(bit_lattice::ONE);
737  }
738  }
739  else if(rhs_kind == exact_div_expr_K || rhs_kind == trunc_div_expr_K)
740  {
741  if(op0_signed)
742  {
743  res = create_u_bitstring(1u + static_cast<unsigned int>(op0_bitstring.size()));
744  }
745  else
746  {
747  res = create_u_bitstring(static_cast<unsigned int>(op0_bitstring.size()));
748  }
749  while(res.size() > lhs_size)
750  {
751  res.pop_front();
752  }
753  }
754  else if(rhs_kind == ge_expr_K)
755  {
756  if(op0_bitstring.size() > op1_bitstring.size())
757  {
758  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
759  }
760  if(op1_bitstring.size() > op0_bitstring.size())
761  {
762  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
763  }
764 
765  const auto is_signed_var = op0_signed || op1_signed;
766  auto op0_bitstring_it = op0_bitstring.begin();
767  auto op1_bitstring_it = op1_bitstring.begin();
768  auto computed_result = false;
769  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
770  ++op0_bitstring_it, ++op1_bitstring_it)
771  {
772  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
773  {
774  // <U> is UNKNOWN
775  res.push_front(bit_lattice::U);
776  computed_result = true;
777  break;
778  }
779  else if(*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO)
780  {
781  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
782  {
783  // <0> is FALSE
784  res.push_front(bit_lattice::ZERO);
785  }
786  else
787  {
788  // <1> is TRUE
789  res.push_front(bit_lattice::ONE);
790  }
791  computed_result = true;
792  break;
793  }
794  else if(*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE)
795  {
796  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
797  {
798  // <1> is TRUE
799  res.push_front(bit_lattice::ONE);
800  }
801  else
802  {
803  // <0> is FALSE
804  res.push_front(bit_lattice::ZERO);
805  }
806  computed_result = true;
807  break;
808  }
809  }
810  if(!computed_result)
811  {
 812  // If the result has not been computed so far, the bitstrings are equal
813  // <1> is TRUE
814  res.push_front(bit_lattice::ONE);
815  }
816  }
817  else if(rhs_kind == gt_expr_K)
818  {
819  if(op0_bitstring.size() > op1_bitstring.size())
820  {
821  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
822  }
823  if(op1_bitstring.size() > op0_bitstring.size())
824  {
825  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
826  }
827 
828  const auto is_signed_var = op0_signed || op1_signed;
829  auto op0_bitstring_it = op0_bitstring.begin();
830  auto op1_bitstring_it = op1_bitstring.begin();
831  auto computed_result = false;
832  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
833  ++op0_bitstring_it, ++op1_bitstring_it)
834  {
835  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
836  {
837  // <U> is UNKNOWN
838  res.push_front(bit_lattice::U);
839  computed_result = true;
840  break;
841  }
842  else if(*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO)
843  {
844  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
845  {
846  // <0> is FALSE
847  res.push_front(bit_lattice::ZERO);
848  }
849  else
850  {
851  // <1> is TRUE
852  res.push_front(bit_lattice::ONE);
853  }
854  computed_result = true;
855  break;
856  }
857  else if(*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE)
858  {
859  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
860  {
861  // <1> is TRUE
862  res.push_front(bit_lattice::ONE);
863  }
864  else
865  {
866  // <0> is FALSE
867  res.push_front(bit_lattice::ZERO);
868  }
869  computed_result = true;
870  break;
871  }
872  }
873  if(!computed_result)
874  {
 875  // If the result has not been computed so far, the bitstrings are equal
876  // <0> is FALSE
877  res.push_front(bit_lattice::ZERO);
878  }
879  }
880  else if(rhs_kind == le_expr_K)
881  {
882  if(op0_bitstring.size() > op1_bitstring.size())
883  {
884  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
885  }
886  if(op1_bitstring.size() > op0_bitstring.size())
887  {
888  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
889  }
890 
891  const auto is_signed_var = op0_signed || op1_signed;
892  auto op0_bitstring_it = op0_bitstring.begin();
893  auto op1_bitstring_it = op1_bitstring.begin();
894  auto computed_result = false;
895  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
896  ++op0_bitstring_it, ++op1_bitstring_it)
897  {
898  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
899  {
900  // <U> is UNKNOWN
901  res.push_front(bit_lattice::U);
902  computed_result = true;
903  break;
904  }
905  else if(*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE)
906  {
907  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
908  {
909  // <0> is FALSE
910  res.push_front(bit_lattice::ZERO);
911  }
912  else
913  {
914  // <1> is TRUE
915  res.push_front(bit_lattice::ONE);
916  }
917  computed_result = true;
918  break;
919  }
920  else if(*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO)
921  {
922  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
923  {
924  // <1> is TRUE
925  res.push_front(bit_lattice::ONE);
926  }
927  else
928  {
929  // <0> is FALSE
930  res.push_front(bit_lattice::ZERO);
931  }
932  computed_result = true;
933  break;
934  }
935  }
936  if(!computed_result)
937  {
 938  // If the result has not been computed so far, the bitstrings are equal
939  // <1> is TRUE
940  res.push_front(bit_lattice::ONE);
941  }
942  }
943  else if(rhs_kind == lrotate_expr_K)
944  {
945  if(GET_CONST_NODE(operation->op1)->get_kind() == ssa_name_K)
946  {
947  res = create_u_bitstring(lhs_size);
948  }
949  else if(GET_CONST_NODE(operation->op1)->get_kind() == integer_cst_K)
950  {
951  const auto arg2_value = tree_helper::GetConstValue(operation->op1);
952 
953  if(lhs_size > op0_bitstring.size())
954  {
955  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
956  }
957  res = op0_bitstring;
958  for(integer_cst_t index = 0; index < arg2_value; ++index)
959  {
960  bit_lattice cur_bit = res.front();
961  res.pop_front();
962  res.push_back(cur_bit);
963  }
964  }
965  else
966  {
967  THROW_ERROR("unexpected case");
968  }
969  }
970  else if(rhs_kind == lshift_expr_K)
971  {
972  if(GET_CONST_NODE(operation->op1)->get_kind() == ssa_name_K)
973  {
974  const auto bsize_elev2 = 1ULL << op1_bitstring.size();
975  if(lhs_size < bsize_elev2 || lhs_size < bsize_elev2 + op0_bitstring.size())
976  {
977  res = create_u_bitstring(lhs_size);
978  }
979  else
980  {
981  res = create_u_bitstring(static_cast<unsigned int>(op0_bitstring.size() + bsize_elev2));
982  }
983  }
984  else if(GET_CONST_NODE(operation->op1)->get_kind() == integer_cst_K)
985  {
986  const auto cst_val = tree_helper::GetConstValue(operation->op1);
987  if(cst_val < 0)
988  {
989  res.push_back(bit_lattice::X);
990  break;
991  }
992 
993  const auto op0_bitsize = BitLatticeManipulator::Size(operation->op0);
994  if(lhs_size <= static_cast<size_t>(cst_val))
995  {
996  res.push_front(bit_lattice::ZERO);
997  }
998  else
999  {
1000  res = op0_bitstring;
1001  while(res.size() > op0_bitsize)
1002  {
1003  res.pop_front();
1004  }
1005  for(integer_cst_t i = 0; i < cst_val; i++)
1006  {
1007  res.push_back(bit_lattice::ZERO);
1008  if(res.size() > lhs_size)
1009  {
1010  res.pop_front();
1011  }
1012  }
1013  }
1014  }
1015  else
1016  {
1017  THROW_ERROR("unexpected case");
1018  }
1019  }
1020  else if(rhs_kind == lt_expr_K)
1021  {
1022  if(op0_bitstring.size() > op1_bitstring.size())
1023  {
1024  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1025  }
1026  if(op1_bitstring.size() > op0_bitstring.size())
1027  {
1028  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1029  }
1030 
1031  const auto is_signed_var = op0_signed || op1_signed;
1032  auto op0_bitstring_it = op0_bitstring.begin();
1033  auto op1_bitstring_it = op1_bitstring.begin();
1034  auto computed_result = false;
1035  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
1036  ++op0_bitstring_it, ++op1_bitstring_it)
1037  {
1038  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
1039  {
1040  // <U> is UNKNOWN
1041  res.push_front(bit_lattice::U);
1042  computed_result = true;
1043  break;
1044  }
1045  else if(*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE)
1046  {
1047  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
1048  {
1049  // <0> is FALSE
1050  res.push_front(bit_lattice::ZERO);
1051  }
1052  else
1053  {
1054  // <1> is TRUE
1055  res.push_front(bit_lattice::ONE);
1056  }
1057  computed_result = true;
1058  break;
1059  }
1060  else if(*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO)
1061  {
1062  if(op0_bitstring_it == op0_bitstring.begin() && is_signed_var)
1063  {
1064  // <1> is TRUE
1065  res.push_front(bit_lattice::ONE);
1066  }
1067  else
1068  {
1069  // <0> is FALSE
1070  res.push_front(bit_lattice::ZERO);
1071  }
1072  computed_result = true;
1073  break;
1074  }
1075  }
1076  if(!computed_result)
1077  {
 1078  // If the result has not been computed so far, the bitstrings are equal
1079  // <0> is FALSE
1080  res.push_front(bit_lattice::ZERO);
1081  }
1082  }
1083  else if(rhs_kind == max_expr_K || rhs_kind == min_expr_K)
1084  {
1085  if(op0_bitstring.size() > op1_bitstring.size())
1086  {
1087  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1088  }
1089  if(op1_bitstring.size() > op0_bitstring.size())
1090  {
1091  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1092  }
1093 
1094  THROW_ASSERT(op0_signed == op1_signed, "");
1095  res = inf(op0_bitstring, op1_bitstring, operation->op0);
1096  }
1097  else if(rhs_kind == minus_expr_K)
1098  {
1099  const auto arg_size_max = std::max(op0_bitstring.size(), op1_bitstring.size());
1100  if(arg_size_max > op1_bitstring.size())
1101  {
1102  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, arg_size_max);
1103  }
1104  if(arg_size_max > op0_bitstring.size())
1105  {
1106  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, arg_size_max);
1107  }
1108 
1109  auto op0_it = op0_bitstring.rbegin();
1110  auto op1_it = op1_bitstring.rbegin();
1111  auto borrow = bit_lattice::ZERO;
1112  for(auto bit_index = 0u;
1113  bit_index < lhs_size && op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
1114  ++op0_it, ++op1_it, ++bit_index)
1115  {
1116  res.push_front(minus_expr_map.at(*op0_it).at(*op1_it).at(borrow).back());
1117  borrow = minus_expr_map.at(*op0_it).at(*op1_it).at(borrow).front();
1118  }
1119  if(lhs_signed && res.size() < lhs_size)
1120  {
1121  res.push_front(minus_expr_map.at(op0_bitstring.front()).at(op1_bitstring.front()).at(borrow).back());
1122  }
1123  else if(!lhs_signed)
1124  {
1125  while(res.size() < lhs_size)
1126  {
1127  res.push_front(borrow);
1128  }
1129  }
1130  }
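 // Worked example for the minus_expr branch above (illustrative only): with
 // op0_bitstring <0,1,1> (3), op1_bitstring <0,0,1> (1) and lhs_size == 3, the borrow
 // chain produces, from the least significant bit upwards, 1-1=0, 1-0=1, 0-0=0,
 // i.e. <0,1,0> (2). A U bit in either operand makes the corresponding result bit U
 // and, through the borrow, can spread the uncertainty to more significant bits.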
1131  else if(rhs_kind == mult_expr_K || rhs_kind == widen_mult_expr_K)
1132  {
1133  // auto mult0 = [&] {
1134  // if(op0_bitstring.size() > op1_bitstring.size())
1135  // {
1136  // op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1137  // }
1138  // if(op1_bitstring.size() > op0_bitstring.size())
1139  // {
1140  // op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1141  // }
1142  //
1143  // auto op0_it = op0_bitstring.rbegin();
1144  // auto op1_it = op1_bitstring.rbegin();
1145  // auto op0_it_fw = op0_bitstring.begin();
1146  // auto op1_it_fw = op1_bitstring.begin();
1147  // // trailing zeros of a
1148  // unsigned int ta = 0;
1149  // // trailing zeros of b
1150  // unsigned int tb = 0;
1151  // // leading zeros of a
1152  // unsigned int la = 0;
1153  // // leading zeros of b
1154  // unsigned int lb = 0;
1155  // for(; op0_it != op0_bitstring.rend() && *op0_it == bit_lattice::ZERO; ++op0_it, ++ta)
1156  // ;
1157  //
1158  // for(; op1_it != op1_bitstring.rend() && *op1_it == bit_lattice::ZERO; ++op1_it, ++tb)
1159  // ;
1160  //
1161  // for(; op0_it_fw != op0_bitstring.end() && *op0_it_fw == bit_lattice::ZERO; ++op0_it_fw, ++la)
1162  // ;
1163  //
1164  // for(; op1_it_fw != op1_bitstring.end() && *op1_it_fw == bit_lattice::ZERO; ++op1_it_fw, ++lb)
1165  // ;
1166  //
1167  // // if one of the two arguments is all zeros returns zero
1168  // if(la == op0_bitstring.size() || lb == op1_bitstring.size())
1169  // {
1170  // res.push_back(bit_lattice::ZERO);
1171  // }
1172  // else
1173  // {
1174  // size_t lenght_op0 = op0_bitstring.size();
1175  // size_t lenght_op1 = op1_bitstring.size();
1176  // THROW_ASSERT(static_cast<int>(lenght_op0 + lenght_op1 - ta - tb - la - lb) > 0, "unexpected
1177  // condition"); res = create_u_bitstring(static_cast<unsigned int>(lenght_op0 + lenght_op1 - ta -
1178  // tb - la - lb)); for(unsigned int i = 0; i < ta + tb; i++)
1179  // res.push_back(bit_lattice::ZERO);
1180  // if(lhs_signed && la > 0 && lb > 0)
1181  // res.push_front(bit_lattice::ZERO);
1182  // while(res.size() > res_bitsize)
1183  // res.pop_front();
1184  // }
1185  // };
1186  // if(0)
1187  // mult0();
1188  const auto lenght_op0 = op0_bitstring.size();
1189  const auto lenght_op1 = op1_bitstring.size();
1190  const auto res_bitsize = std::min(lenght_op0 + lenght_op1, static_cast<size_t>(lhs_size));
1191  if(res_bitsize > op0_bitstring.size())
1192  {
1193  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, res_bitsize);
1194  }
1195  if(res_bitsize > op1_bitstring.size())
1196  {
1197  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, res_bitsize);
1198  }
1199  while(op0_bitstring.size() > res_bitsize)
1200  {
1201  op0_bitstring.pop_front();
1202  }
1203  while(op1_bitstring.size() > res_bitsize)
1204  {
1205  op1_bitstring.pop_front();
1206  }
1207 
1208  while(res.size() < res_bitsize)
1209  {
1210  res.push_front(bit_lattice::ZERO);
1211  }
1212  auto op1_it = op1_bitstring.crbegin();
1213  for(auto pos = 0u; op1_it != op1_bitstring.crend() && pos < res_bitsize; ++op1_it, ++pos)
1214  {
1215  std::deque<bit_lattice> temp_op1;
1216  while(temp_op1.size() < pos)
1217  {
1218  temp_op1.push_front(bit_lattice::ZERO);
1219  }
1220  auto op0_it = op0_bitstring.crbegin();
1221  for(size_t idx = 0; (idx + pos) < res_bitsize; ++idx, ++op0_it)
1222  {
1223  temp_op1.push_front(bit_and_expr_map.at(*op0_it).at(*op1_it));
1224  }
1225  bit_lattice carry1 = bit_lattice::ZERO;
1226  std::deque<bit_lattice> temp_res;
1227  auto temp_op1_it = temp_op1.crbegin();
1228  const auto temp_op1_end = temp_op1.crend();
1229  auto res_it = res.crbegin();
1230  const auto res_end = res.crend();
1231  for(auto bit_index = 0u; bit_index < res_bitsize && temp_op1_it != temp_op1_end && res_it != res_end;
1232  temp_op1_it++, res_it++, bit_index++)
1233  {
1234  temp_res.push_front(plus_expr_map.at(*temp_op1_it).at(*res_it).at(carry1).back());
1235  carry1 = plus_expr_map.at(*temp_op1_it).at(*res_it).at(carry1).front();
1236  }
1237  res = temp_res;
1238  }
1239  }
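 // Worked example for the multiplication above (illustrative only): with lhs_size 4,
 // op0_bitstring <0,0,1,1> (3) and op1_bitstring <0,0,1,0> (2), res_bitsize is 4 and
 // the shift-and-add loop accumulates one partial product per op1 bit: the only ONE
 // bit of op1 (position 1) contributes op0 shifted left by one, <0,1,1,0>, so the
 // final result is <0,1,1,0> (6).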
1240  else if(rhs_kind == ne_expr_K)
1241  {
1242  if(op0_bitstring.size() > op1_bitstring.size())
1243  {
1244  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1245  }
1246  if(op1_bitstring.size() > op0_bitstring.size())
1247  {
1248  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1249  }
1250 
1251  auto op0_bitstring_it = op0_bitstring.begin();
1252  auto op1_bitstring_it = op1_bitstring.begin();
1253  auto computed_result = false;
1254  for(; op0_bitstring_it != op0_bitstring.end() && op1_bitstring_it != op1_bitstring.end();
1255  ++op0_bitstring_it, ++op1_bitstring_it)
1256  {
1257  if(*op0_bitstring_it == bit_lattice::U || *op1_bitstring_it == bit_lattice::U)
1258  {
1259  // <U> is UNKNOWN
1260  res.push_front(bit_lattice::U);
1261  computed_result = true;
1262  break;
1263  }
1264  else if((*op0_bitstring_it == bit_lattice::ZERO && *op1_bitstring_it == bit_lattice::ONE) ||
1265  (*op0_bitstring_it == bit_lattice::ONE && *op1_bitstring_it == bit_lattice::ZERO))
1266  {
1267  // <1> is TRUE
1268  res.push_front(bit_lattice::ONE);
1269  computed_result = true;
1270  break;
1271  }
1272  }
1273  if(!computed_result)
1274  {
 1275  // If the result has not been computed so far, the bitstrings are equal
1276  // <0> is FALSE
1277  res.push_front(bit_lattice::ZERO);
1278  }
1279  }
1280  else if(rhs_kind == plus_expr_K || rhs_kind == pointer_plus_expr_K)
1281  {
1282  if(op0_bitstring.size() > op1_bitstring.size())
1283  {
1284  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1285  }
1286  if(op1_bitstring.size() > op0_bitstring.size())
1287  {
1288  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1289  }
1290 
1291  auto op0_it = op0_bitstring.rbegin();
1292  auto op1_it = op1_bitstring.rbegin();
1293  auto carry1 = bit_lattice::ZERO;
1294  for(auto bit_index = 0u;
1295  bit_index < lhs_size && op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
1296  ++op0_it, ++op1_it, ++bit_index)
1297  {
1298  // INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "op0: "+STR(*op0_it) + "op1: "STR(*op1_it) + "carry:
1299  // " + STR(carry1));
1300  res.push_front(plus_expr_map.at(*op0_it).at(*op1_it).at(carry1).back());
1301  carry1 = plus_expr_map.at(*op0_it).at(*op1_it).at(carry1).front();
1302  }
1303 
1304  if(lhs_signed && res.size() < lhs_size)
1305  {
1306  res.push_front(plus_expr_map.at(op0_bitstring.front()).at(op1_bitstring.front()).at(carry1).back());
1307  }
1308  else if(!lhs_signed)
1309  {
1310  while(res.size() < lhs_size)
1311  {
1312  res.push_front(plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).back());
1313  carry1 = plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).front();
1314  }
1315  }
1316  }
1317  else if(rhs_kind == rrotate_expr_K)
1318  {
1319  if(GET_CONST_NODE(operation->op1)->get_kind() == ssa_name_K)
1320  {
1321  res = create_u_bitstring(lhs_size);
1322  }
1323  else if(GET_CONST_NODE(operation->op1)->get_kind() == integer_cst_K)
1324  {
1325  const auto op1_value = tree_helper::GetConstValue(operation->op1);
1326 
1327  if(lhs_size > op0_bitstring.size())
1328  {
1329  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
1330  }
1331  res = op0_bitstring;
1332  for(integer_cst_t index = 0; index < op1_value; ++index)
1333  {
1334  const auto cur_bit = res.back();
1335  res.pop_back();
1336  res.push_front(cur_bit);
1337  }
1338  }
1339  else
1340  {
1341  THROW_ERROR("unexpected case");
1342  }
1343  }
1344  else if(rhs_kind == rshift_expr_K)
1345  {
1346  if(GET_CONST_NODE(operation->op1)->get_kind() == ssa_name_K)
1347  {
1348  res = create_u_bitstring(static_cast<unsigned int>(op0_bitstring.size()));
1349  }
1350  else if(GET_CONST_NODE(operation->op1)->get_kind() == integer_cst_K)
1351  {
1352  const auto cst_val = tree_helper::GetConstValue(operation->op1);
1353  if(cst_val < 0)
1354  {
 1355  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1356  "---forward_transfer, negative right shift is undefined behavior");
1357  res.push_back(bit_lattice::X);
1358  break;
1359  }
1360 
1361  if(op0_bitstring.size() <= static_cast<size_t>(cst_val))
1362  {
1363  if(op0_signed)
1364  {
1365  res.push_front(op0_bitstring.front());
1366  }
1367  else
1368  {
1369  res.push_front(bit_lattice::ZERO);
1370  }
1371  }
1372  else
1373  {
1374  const auto new_lenght = op0_bitstring.size() - static_cast<size_t>(cst_val);
1375  auto op0_it = op0_bitstring.begin();
1376  while(res.size() < new_lenght)
1377  {
1378  res.push_back(*op0_it);
1379  ++op0_it;
1380  }
1381  }
1382  }
1383  else
1384  {
1385  THROW_ERROR("unexpected case");
1386  }
1387  }
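 // Worked example for the constant rshift_expr above (illustrative only): with
 // op0_bitstring <1,0,1,1> (unsigned 11) and cst_val 2, new_lenght is 2 and the loop
 // copies the two most significant bits, giving <1,0> (2); when cst_val is at least
 // the bitstring length the whole result collapses to the sign bit (signed) or ZERO.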
1388  else if(rhs_kind == trunc_mod_expr_K)
1389  {
1390  if(op0_signed)
1391  {
1392  res = create_u_bitstring(
1393  1 + static_cast<unsigned int>(std::min(op0_bitstring.size(), op1_bitstring.size())));
1394  }
1395  else
1396  {
1397  res =
1398  create_u_bitstring(static_cast<unsigned int>(std::min(op0_bitstring.size(), op1_bitstring.size())));
1399  }
1400  while(res.size() > lhs_size)
1401  {
1402  res.pop_front();
1403  }
1404  }
1405  else if(rhs_kind == truth_and_expr_K || rhs_kind == truth_andif_expr_K || rhs_kind == truth_or_expr_K ||
1406  rhs_kind == truth_orif_expr_K || rhs_kind == truth_xor_expr_K)
1407  {
1408  if(op0_bitstring.size() > op1_bitstring.size())
1409  {
1410  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1411  }
1412  if(op1_bitstring.size() > op0_bitstring.size())
1413  {
1414  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1415  }
1416 
1417  auto arg_left = bit_lattice::ZERO;
1418  for(const auto& current_bit : op0_bitstring)
1419  {
1420  if(current_bit == bit_lattice::ONE)
1421  {
1422  arg_left = bit_lattice::ONE;
1423  break;
1424  }
1425  else if(current_bit == bit_lattice::U)
1426  {
1427  arg_left = bit_lattice::U;
1428  }
1429  }
1430  auto arg_right = bit_lattice::ZERO;
1431  for(const auto& current_bit : op1_bitstring)
1432  {
1433  if(current_bit == bit_lattice::ONE)
1434  {
1435  arg_right = bit_lattice::ONE;
1436  break;
1437  }
1438  else if(current_bit == bit_lattice::U)
1439  {
1440  arg_right = bit_lattice::U;
1441  }
1442  }
1443  if(rhs_kind == truth_and_expr_K || rhs_kind == truth_andif_expr_K)
1444  {
1445  res.push_front(bit_and_expr_map.at(arg_left).at(arg_right));
1446  }
1447  else if(rhs_kind == truth_or_expr_K || rhs_kind == truth_orif_expr_K)
1448  {
1449  res.push_front(bit_ior_expr_map.at(arg_left).at(arg_right));
1450  }
1451  else if(rhs_kind == truth_xor_expr_K)
1452  {
1453  res.push_front(bit_xor_expr_map.at(arg_left).at(arg_right));
1454  }
1455  else
1456  {
1457  THROW_UNREACHABLE("unexpected condition");
1458  }
1459  }
1460  else if(rhs_kind == extract_bit_expr_K)
1461  {
1462  THROW_ASSERT(GET_CONST_NODE(operation->op1)->get_kind() == integer_cst_K, "unexpected condition");
1463  const auto cst_val = tree_helper::GetConstValue(operation->op1);
1464  THROW_ASSERT(cst_val >= 0, "unexpected condition");
1465 
1466  if(op0_bitstring.size() <= static_cast<size_t>(cst_val))
1467  {
1468  if(op0_signed)
1469  {
1470  res.push_front(op0_bitstring.front());
1471  }
1472  else
1473  {
1474  res.push_front(bit_lattice::ZERO);
1475  }
1476  }
1477  else
1478  {
1479  const auto new_lenght = op0_bitstring.size() - static_cast<size_t>(cst_val);
1480  auto op0_it = op0_bitstring.begin();
1481  while(res.size() < new_lenght)
1482  {
1483  res.push_back(*op0_it);
1484  ++op0_it;
1485  }
1486  while(res.size() > 1)
1487  {
1488  res.pop_front();
1489  }
1490  }
1491  }
1492  else
1493  {
 1494  THROW_UNREACHABLE("Unhandled binary expression: " + ga->ToString() + "(" + tree_node::GetString(rhs_kind) +
1495  ")");
1496  }
1497  break;
1498  }
1499  case bit_ior_concat_expr_K:
1500  case cond_expr_K:
1501  case ternary_plus_expr_K:
1502  case ternary_pm_expr_K:
1503  case ternary_mp_expr_K:
1504  case ternary_mm_expr_K:
1505  case fshl_expr_K:
1506  case fshr_expr_K:
1507  {
1508  const auto operation = GetPointer<const ternary_expr>(GET_CONST_NODE(rhs));
1509 
1510  if(!IsHandledByBitvalue(operation->op0))
1511  {
 1512  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1513  "---operand " + STR(operation->op0) + " of type " +
1514  STR(tree_helper::CGetType(operation->op0)) + " not handled by bitvalue");
1515  res = create_u_bitstring(lhs_size);
1516  break;
1517  }
1518 
1519  if(!IsHandledByBitvalue(operation->op1))
1520  {
 1521  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1522  "---operand " + STR(operation->op1) + " of type " +
1523  STR(tree_helper::CGetType(operation->op1)) + " not handled by bitvalue");
1524  res = create_u_bitstring(lhs_size);
1525  break;
1526  }
1527 
1528  if(!IsHandledByBitvalue(operation->op2))
1529  {
 1530  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1531  "---operand " + STR(operation->op2) + " of type " +
1532  STR(tree_helper::CGetType(operation->op2)) + " not handled by bitvalue");
1533  res = create_u_bitstring(lhs_size);
1534  break;
1535  }
1536 
1537  const auto op0_signed = IsSignedIntegerType(operation->op0);
1538  auto op0_bitstring = get_current(operation->op0);
1539 
1540  const auto op1_signed = IsSignedIntegerType(operation->op1);
1541  auto op1_bitstring = get_current(operation->op1);
1542 
1543  auto op2_bitstring = get_current(operation->op2);
1544 
1545  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level, "---forward_transfer");
 1546  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1547  "--- operand0(" + STR(GET_INDEX_NODE(operation->op0)) +
1548  "): " + bitstring_to_string(op0_bitstring));
 1549  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1550  "--- operand1(" + STR(GET_INDEX_NODE(operation->op1)) +
1551  "): " + bitstring_to_string(op1_bitstring));
 1552  INDENT_DBG_MEX(DEBUG_LEVEL_PEDANTIC, debug_level,
 1553  "--- operand2(" + STR(GET_INDEX_NODE(operation->op2)) +
1554  "): " + bitstring_to_string(op2_bitstring));
1556 
1557  if(rhs_kind == bit_ior_concat_expr_K)
1558  {
1559  const auto offset = tree_helper::GetConstValue(operation->op2);
1560 
1561  if(op0_bitstring.size() > op1_bitstring.size())
1562  {
1563  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, op0_bitstring.size());
1564  }
1565  if(op1_bitstring.size() > op0_bitstring.size())
1566  {
1567  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, op1_bitstring.size());
1568  }
1569 
1570  auto op0_it = op0_bitstring.rbegin();
1571  auto op1_it = op1_bitstring.rbegin();
1572  for(integer_cst_t index = 0; op0_it != op0_bitstring.rend() && op1_it != op1_bitstring.rend();
1573  ++op0_it, ++op1_it, ++index)
1574  {
1575  if(index < offset)
1576  {
1577  res.push_front(*op1_it);
1578  }
1579  else
1580  {
1581  res.push_front(*op0_it);
1582  }
1583  }
1584  }
1585  else if(rhs_kind == cond_expr_K)
1586  {
1587  auto arg_cond = bit_lattice::ZERO;
1588  for(const auto& current_bit : op0_bitstring)
1589  {
1590  if(current_bit == bit_lattice::ONE)
1591  {
1592  arg_cond = bit_lattice::ONE;
1593  break;
1594  }
1595  else if(current_bit == bit_lattice::U)
1596  {
1597  arg_cond = bit_lattice::U;
1598  }
1599  }
1600 
1601  if(arg_cond == bit_lattice::ZERO)
1602  {
 1603  // Condition is false, return the second argument (op2_bitstring)
1604  res = op2_bitstring;
1605  }
1606  else if(arg_cond == bit_lattice::ONE)
1607  {
 1608  // Condition is true, return the first argument (op1_bitstring)
1609  res = op1_bitstring;
1610  }
1611  else
1612  {
1613  // CONDITION IS UNKNOWN ---> RETURN INF OF ARGS
1614  /*
1615  * Note: forward transfer for cond_expr is the only case where it may
1616  * be necessary to compute the inf of the bitstrings of two ssa with
1617  * different signedness. For this reason the bitstrings of both ssa
1618  * must be extended with their own signedness before computing the inf.
1619  * The reason is that otherwise the inf() will sign_extend them with
1620  * the signedness of the output of the cond_expr, which is not
1621  * necessarily the same of both the inputs.
1622  */
1623  const auto inf_size = std::max(op1_bitstring.size(), op2_bitstring.size());
1624  if(op1_bitstring.size() < inf_size)
1625  {
1626  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, inf_size);
1627  }
1628  if(op2_bitstring.size() < inf_size)
1629  {
1630  op2_bitstring = sign_extend_bitstring(op2_bitstring, IsSignedIntegerType(operation->op2), inf_size);
1631  }
1632  res = inf(op1_bitstring, op2_bitstring, lhs);
1633  }
1634  }
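 // Worked example for the mixed-signedness case above (illustrative only): if op1 is
 // the signed 3-bit <1,1,0> (-2) and op2 the unsigned 2-bit <1,0> (2), op2 is first
 // extended with its own (unsigned) signedness to <0,1,0> and the inf then gives
 // <U,1,0>; extending op2 with the signedness of the cond_expr output instead could
 // turn <1,0> into <1,1,0> and make the inf claim bits that are not actually known.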
1635  else if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_pm_expr_K || rhs_kind == ternary_mp_expr_K ||
1636  rhs_kind == ternary_mm_expr_K)
1637  {
1638  const auto arg_size_max = std::max({op0_bitstring.size(), op1_bitstring.size(), op2_bitstring.size()});
1639  if(op0_bitstring.size() < arg_size_max)
1640  {
1641  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, arg_size_max);
1642  }
1643  if(op1_bitstring.size() < arg_size_max)
1644  {
1645  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, arg_size_max);
1646  }
1647  if(op2_bitstring.size() < arg_size_max)
1648  {
1649  op2_bitstring = sign_extend_bitstring(op2_bitstring, IsSignedIntegerType(operation->op2), arg_size_max);
1650  }
1651 
1652  auto op0_it = op0_bitstring.crbegin();
1653  auto op1_it = op1_bitstring.crbegin();
1654  const auto op0_end = op0_bitstring.crend();
1655  const auto op1_end = op1_bitstring.crend();
1656  auto carry1 = bit_lattice::ZERO;
1657  std::deque<bit_lattice> res_int;
1658  for(auto bit_index = 0u; bit_index < lhs_size && op0_it != op0_end && op1_it != op1_end;
1659  op0_it++, op1_it++, bit_index++)
1660  {
1661  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_pm_expr_K)
1662  {
1663  res_int.push_front(plus_expr_map.at(*op0_it).at(*op1_it).at(carry1).back());
1664  carry1 = plus_expr_map.at(*op0_it).at(*op1_it).at(carry1).front();
1665  }
1666  else
1667  {
1668  res_int.push_front(minus_expr_map.at(*op0_it).at(*op1_it).at(carry1).back());
1669  carry1 = minus_expr_map.at(*op0_it).at(*op1_it).at(carry1).front();
1670  }
1671  }
1672 
1673  if(lhs_signed && res_int.size() < lhs_size)
1674  {
1675  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_pm_expr_K)
1676  {
1677  res_int.push_front(
1678  plus_expr_map.at(op0_bitstring.front()).at(op1_bitstring.front()).at(carry1).back());
1679  }
1680  else
1681  {
1682  res_int.push_front(
1683  minus_expr_map.at(op0_bitstring.front()).at(op1_bitstring.front()).at(carry1).back());
1684  }
1685  }
1686  else if(!lhs_signed)
1687  {
1688  while(res_int.size() < lhs_size)
1689  {
1690  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_pm_expr_K)
1691  {
1692  res_int.push_front(plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).back());
1693  carry1 = plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).front();
1694  }
1695  else
1696  {
1697  res_int.push_front(minus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).back());
1698  carry1 = minus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).front();
1699  }
1700  }
1701  }
1703 
1704  if(res_int.size() > op2_bitstring.size())
1705  {
1706  op2_bitstring =
1707  sign_extend_bitstring(op2_bitstring, IsSignedIntegerType(operation->op2), res_int.size());
1708  }
1709  auto op2_it = op2_bitstring.crbegin();
1710  auto res_int_it = res_int.crbegin();
1711  const auto op2_end = op2_bitstring.crend();
1712  const auto res_int_end = res_int.crend();
1713  carry1 = bit_lattice::ZERO;
1714  for(auto bit_index = 0u; bit_index < lhs_size && op2_it != op2_end && res_int_it != res_int_end;
1715  op2_it++, res_int_it++, bit_index++)
1716  {
1717  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_mp_expr_K)
1718  {
1719  res.push_front(plus_expr_map.at(*res_int_it).at(*op2_it).at(carry1).back());
1720  carry1 = plus_expr_map.at(*res_int_it).at(*op2_it).at(carry1).front();
1721  }
1722  else
1723  {
1724  res.push_front(minus_expr_map.at(*res_int_it).at(*op2_it).at(carry1).back());
1725  carry1 = minus_expr_map.at(*res_int_it).at(*op2_it).at(carry1).front();
1726  }
1727  }
1728 
1729  if(lhs_signed && res.size() < lhs_size)
1730  {
1731  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_mp_expr_K)
1732  {
1733  res.push_front(plus_expr_map.at(res_int.front()).at(op2_bitstring.front()).at(carry1).back());
1734  }
1735  else
1736  {
1737  res.push_front(minus_expr_map.at(res_int.front()).at(op2_bitstring.front()).at(carry1).back());
1738  }
1739  }
1740  else if(!lhs_signed)
1741  {
1742  while(res.size() < lhs_size)
1743  {
1744  if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_mp_expr_K)
1745  {
1746  res.push_front(plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).back());
1747  carry1 = plus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).front();
1748  }
1749  else
1750  {
1751  res.push_front(minus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).back());
1752  carry1 = minus_expr_map.at(bit_lattice::ZERO).at(bit_lattice::ZERO).at(carry1).front();
1753  }
1754  }
1755  }
1756  }
1757  else if(rhs_kind == fshl_expr_K)
1758  {
1759  if(GET_CONST_NODE(operation->op2)->get_kind() == ssa_name_K)
1760  {
1761  res = create_u_bitstring(lhs_size);
1762  }
1763  else if(GET_CONST_NODE(operation->op2)->get_kind() == integer_cst_K)
1764  {
1766  const auto offset = static_cast<unsigned int>(tree_helper::GetConstValue(operation->op2) %
1767  static_cast<unsigned int>(lhs_size));
1768 
1769  if(lhs_size > op0_bitstring.size())
1770  {
1771  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
1772  }
1773  if(lhs_size > op1_bitstring.size())
1774  {
1775  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, lhs_size);
1776  }
1777  res.insert(res.end(), op0_bitstring.begin() + offset, op0_bitstring.end());
1778  res.insert(res.end(), op1_bitstring.begin(), op1_bitstring.begin() + offset);
1779  THROW_ASSERT(res.size() == lhs_size, "");
1780  }
1781  else
1782  {
1783  THROW_ERROR("unexpected case");
1784  }
1785  }
1786  else if(rhs_kind == fshr_expr_K)
1787  {
1788  if(GET_CONST_NODE(operation->op2)->get_kind() == ssa_name_K)
1789  {
1790  res = create_u_bitstring(lhs_size);
1791  }
1792  else if(GET_CONST_NODE(operation->op2)->get_kind() == integer_cst_K)
1793  {
1795  const auto offset = static_cast<unsigned int>(tree_helper::GetConstValue(operation->op2) %
1796  static_cast<unsigned int>(lhs_size));
1797 
1798  if(lhs_size > op0_bitstring.size())
1799  {
1800  op0_bitstring = sign_extend_bitstring(op0_bitstring, op0_signed, lhs_size);
1801  }
1802  if(lhs_size > op1_bitstring.size())
1803  {
1804  op1_bitstring = sign_extend_bitstring(op1_bitstring, op1_signed, lhs_size);
1805  }
1806  res.insert(res.end(), op0_bitstring.begin() + offset, op0_bitstring.end());
1807  res.insert(res.end(), op1_bitstring.begin(), op1_bitstring.begin() + offset);
1808  THROW_ASSERT(res.size() == lhs_size, "");
1809  }
1810  else
1811  {
1812  THROW_ERROR("unexpected case");
1813  }
1814  }
1815  else
1816  {
 1817  THROW_UNREACHABLE("Unhandled ternary expression: " + ga->ToString() + " (" + tree_node::GetString(rhs_kind) +
1818  ")");
1819  }
1820  break;
1821  }
1822  // Unary expressions
1823  case fix_ceil_expr_K:
1824  case fix_floor_expr_K:
1825  case fix_round_expr_K:
1826  case fix_trunc_expr_K:
1827  case imagpart_expr_K:
1828  case realpart_expr_K:
1829  case alignof_expr_K:
1830  // Binary expressions
1831  case ltgt_expr_K:
1832  case mem_ref_K:
1833  case ordered_expr_K:
1834  case sat_minus_expr_K:
1835  case sat_plus_expr_K:
1836  case uneq_expr_K:
1837  case unge_expr_K:
1838  case ungt_expr_K:
1839  case unle_expr_K:
1840  case unlt_expr_K:
1841  case unordered_expr_K:
1842  case extractvalue_expr_K:
1843  case extractelement_expr_K:
1844  // Ternary expressions
1845  case bit_field_ref_K:
1846  case component_ref_K:
1847  case insertvalue_expr_K:
1848  case insertelement_expr_K:
1849  // Quaternary expressions
1850  case array_ref_K:
1851  {
1852  // Do nothing
1853  break;
1854  }
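      // Direct calls: the return value's bitstring is taken from the callee's
      // function_decl bit_values string; if that has not been computed yet, fall
      // back to an all-<U> bitstring of the lhs size. Indirect calls leave res empty.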
1855  case aggr_init_expr_K:
1856  case call_expr_K:
1857  {
1858  const auto call_it = direct_call_id_to_called_id.find(ga->index);
1859  if(call_it != direct_call_id_to_called_id.end())
1860  {
1861  const auto called_id = call_it->second;
1862  const auto tn = TM->CGetTreeNode(called_id);
1863  THROW_ASSERT(tn->get_kind() == function_decl_K, "node " + STR(called_id) + " is not a function_decl");
1864  const auto* fd = GetPointerS<const function_decl>(tn);
1865  if(fd->bit_values.empty())
1866  {
1867  res = create_u_bitstring(lhs_size);
1868  }
1869  else
1870  {
1871  res = string_to_bitstring(fd->bit_values);
1872  }
1873  }
1874  break;
1875  }
1876  case lut_expr_K:
1877  {
1878  // lut_transformation will take care of this
1879  res = create_u_bitstring(1);
1880  break;
1881  }
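      // All remaining node kinds fall through to the default below: they are not
      // expected on the rhs of a gimple_assign handled by bit-value analysis, so
      // reaching them is reported as an internal error.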
1882  // Unary expressions
1883  case arrow_expr_K:
1884  case buffer_ref_K:
1885  case card_expr_K:
1886  case cleanup_point_expr_K:
1887  case conj_expr_K:
1888  case exit_expr_K:
1889  case float_expr_K:
1890  case indirect_ref_K:
1891  case misaligned_indirect_ref_K:
1892  case loop_expr_K:
1893  case non_lvalue_expr_K:
1894  case reference_expr_K:
1895  case reinterpret_cast_expr_K:
1896  case sizeof_expr_K:
1897  case static_cast_expr_K:
1898  case throw_expr_K:
1899  case unsave_expr_K:
1900  case va_arg_expr_K:
1901  case paren_expr_K:
1902  case reduc_max_expr_K:
1903  case reduc_min_expr_K:
1904  case reduc_plus_expr_K:
1905  case vec_unpack_hi_expr_K:
1906  case vec_unpack_lo_expr_K:
1907  case vec_unpack_float_hi_expr_K:
1908  case vec_unpack_float_lo_expr_K:
1909  // Binary expressions
1910  case assert_expr_K:
1911  case catch_expr_K:
1912  case ceil_div_expr_K:
1913  case ceil_mod_expr_K:
1914  case complex_expr_K:
1915  case compound_expr_K:
1916  case eh_filter_expr_K:
1917  case fdesc_expr_K:
1918  case floor_div_expr_K:
1919  case floor_mod_expr_K:
1920  case goto_subroutine_K:
1921  case in_expr_K:
1922  case init_expr_K:
1923  case modify_expr_K:
1924  case mult_highpart_expr_K:
1925  case postdecrement_expr_K:
1926  case postincrement_expr_K:
1927  case predecrement_expr_K:
1928  case preincrement_expr_K:
1929  case range_expr_K:
1930  case rdiv_expr_K:
1931  case frem_expr_K:
1932  case round_div_expr_K:
1933  case round_mod_expr_K:
1934  case set_le_expr_K:
1935  case try_catch_expr_K:
1936  case try_finally_K:
1937  case widen_sum_expr_K:
1938  case with_size_expr_K:
1939  case vec_lshift_expr_K:
1940  case vec_rshift_expr_K:
1941  case widen_mult_hi_expr_K:
1942  case widen_mult_lo_expr_K:
1943  case vec_pack_trunc_expr_K:
1944  case vec_pack_sat_expr_K:
1945  case vec_pack_fix_trunc_expr_K:
1946  case vec_extracteven_expr_K:
1947  case vec_extractodd_expr_K:
1948  case vec_interleavehigh_expr_K:
1949  case vec_interleavelow_expr_K:
1950  // Ternary expressions
1951  case vtable_ref_K:
1952  case with_cleanup_expr_K:
1953  case obj_type_ref_K:
1954  case save_expr_K:
1955  case vec_cond_expr_K:
1956  case vec_perm_expr_K:
1957  case dot_prod_expr_K:
1958  // Quaternary expressions
1959  case array_range_ref_K:
1960  // Const nodes
1961  case complex_cst_K:
1962  case real_cst_K:
1963  case string_cst_K:
1964  case vector_cst_K:
1965  case void_cst_K:
1966  case CASE_DECL_NODES:
1967  case CASE_TYPE_NODES:
1968  case CASE_PRAGMA_NODES:
1969  case CASE_FAKE_NODES:
1970  case CASE_CPP_NODES:
1971  case CASE_GIMPLE_NODES:
1972  case binfo_K:
1973  case block_K:
1974  case case_label_expr_K:
1975  case constructor_K:
1976  case error_mark_K:
1977  case identifier_node_K:
1978  case statement_list_K:
1979  case target_expr_K:
1980  case target_mem_ref_K:
1981  case target_mem_ref461_K:
1982  case tree_list_K:
1983  case tree_vec_K:
1984  default:
1985  THROW_UNREACHABLE("Unhandled statement: " + ga->ToString() + " (" + tree_node::GetString(rhs_kind) + ")");
1986  break;
1987  }
1989  return res;
1990 }
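The ripple loops above rely entirely on the plus_expr_map / minus_expr_map tables, which behave as full adders and subtractors over the bit lattice: each entry is indexed by the two operand bits and the incoming carry, its back() is the result bit and its front() the outgoing carry. The stand-alone sketch below is not part of Bit_Value_forward.cpp; it uses a hypothetical, reduced {ZERO, ONE, U} lattice and hypothetical names (Bit, full_add, ripple_add) purely to illustrate how such a table-driven ripple addition stays conservative in the presence of unknown bits.

// Illustrative sketch only, not part of the PandA sources.
#include <cstddef>
#include <cstdio>
#include <vector>

enum class Bit { ZERO, ONE, U };

struct SumCarry
{
   Bit sum;
   Bit carry;
};

// Conservative full adder over the reduced lattice: the sum bit is known only when
// all three inputs are known; the carry-out is known as soon as two inputs agree.
static SumCarry full_add(Bit a, Bit b, Bit cin)
{
   int ones = 0, zeros = 0, unknowns = 0;
   for(Bit v : {a, b, cin})
   {
      if(v == Bit::ONE)
         ++ones;
      else if(v == Bit::ZERO)
         ++zeros;
      else
         ++unknowns;
   }
   SumCarry out;
   out.sum = unknowns ? Bit::U : (ones % 2 ? Bit::ONE : Bit::ZERO);
   out.carry = ones >= 2 ? Bit::ONE : (zeros >= 2 ? Bit::ZERO : Bit::U);
   return out;
}

// Ripple addition of two LSB-first bitstrings, mirroring the carry1 loops in
// forward_transfer (which iterate with crbegin to start from the least significant bit).
static std::vector<Bit> ripple_add(const std::vector<Bit>& a, const std::vector<Bit>& b)
{
   std::vector<Bit> res;
   Bit carry = Bit::ZERO;
   for(std::size_t i = 0; i < a.size() && i < b.size(); ++i)
   {
      const auto sc = full_add(a[i], b[i], carry);
      res.push_back(sc.sum);
      carry = sc.carry;
   }
   return res;
}

int main()
{
   // 0U1 + 001: the low bit is known (1+1 -> 0, carry 1), the middle bit is unknown,
   // and the top bit stays unknown because the carry may or may not propagate.
   const std::vector<Bit> a = {Bit::ONE, Bit::U, Bit::ZERO};    // LSB first
   const std::vector<Bit> b = {Bit::ONE, Bit::ZERO, Bit::ZERO}; // LSB first
   const auto r = ripple_add(a, b);
   for(auto it = r.rbegin(); it != r.rend(); ++it)
   {
      std::putchar(*it == Bit::ONE ? '1' : (*it == Bit::ZERO ? '0' : 'U'));
   }
   std::putchar('\n'); // prints "UU0"
   return 0;
}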