Bit_Value_backward.cpp
/*
 *
 *                   _/_/_/    _/_/   _/    _/ _/_/_/    _/_/
 *                  _/   _/ _/    _/ _/_/  _/ _/   _/ _/    _/
 *                 _/_/_/  _/_/_/_/ _/  _/_/ _/   _/ _/_/_/_/
 *                _/      _/    _/ _/    _/ _/   _/ _/    _/
 *               _/      _/    _/ _/    _/ _/_/_/  _/    _/
 *
 *             ***********************************************
 *                              PandA Project
 *                     URL: http://panda.dei.polimi.it
 *                       Politecnico di Milano - DEIB
 *                        System Architectures Group
 *             ***********************************************
 *              Copyright (C) 2004-2024 Politecnico di Milano
 *
 * This file is part of the PandA framework.
 *
 * The PandA framework is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
// include class header
#include "Bit_Value.hpp"

// include from tree/
#include "behavioral_helper.hpp"
#include "tree_basic_block.hpp"
#include "tree_helper.hpp"
#include "tree_manager.hpp"
#include "tree_node.hpp"
#include "tree_reindex.hpp"

// behavior includes
#include "application_manager.hpp"
#include "call_graph_manager.hpp"
#include "function_behavior.hpp"

// include boost range adaptors
#include "dbgPrintHelper.hpp" // for DEBUG_LEVEL_
#include "math_function.hpp"  // for ceil_log2
#include "string_manipulation.hpp"
#include <boost/range/adaptors.hpp>

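// Return the bitstring to use for the operand tn during the backward analysis:
// the value in current when tn is an ssa_name already refined by this pass, the
// conservative value in best otherwise.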
std::deque<bit_lattice> Bit_Value::get_current_or_best(const tree_nodeConstRef& tn) const
{
   const auto nid = GET_INDEX_CONST_NODE(tn);
   const auto node = GET_CONST_NODE(tn);
   if(node->get_kind() == ssa_name_K && current.find(nid) != current.end())
   {
      return current.at(nid);
   }
   THROW_ASSERT(best.count(nid), "");
   return best.at(nid);
}

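// Compute the backward bit-value of ssa_node as the inf over all of its uses:
// gimple_assign users delegate to backward_transfer(), non-virtual phis and
// returns propagate the value required for their result, direct calls propagate
// the bit-value of the matching formal parameter, and control statements
// (gimple_cond, gimple_multi_way_if, gimple_switch, gimple_asm) conservatively
// require all bits (an all-U bitstring).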
std::deque<bit_lattice> Bit_Value::backward_chain(const tree_nodeConstRef& ssa_node) const
{
   const auto ssa = GetPointerS<const ssa_name>(GET_CONST_NODE(ssa_node));
   const auto ssa_nid = ssa->index;
   std::deque<bit_lattice> res = create_x_bitstring(1);
   for(const auto& stmt_use : ssa->CGetUseStmts())
   {
      const auto user_stmt = GET_CONST_NODE(stmt_use.first);
      const auto user_kind = user_stmt->get_kind();
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "-->Analyzing use - " + STR(user_stmt));
      std::deque<bit_lattice> user_res;
      if(user_kind == gimple_assign_K)
      {
         const auto ga = GetPointerS<const gimple_assign>(user_stmt);
         if(!IsHandledByBitvalue(ga->op0))
         {
            INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                           "---variable " + STR(GET_CONST_NODE(ga->op0)) + " of type " +
                               STR(tree_helper::CGetType(ga->op0)) + " not considered");
            user_res = create_u_bitstring(BitLatticeManipulator::Size(ssa_node));
         }
         else if(ga->predicate && ga->predicate->index == ssa_nid)
         {
            user_res = create_u_bitstring(BitLatticeManipulator::Size(ssa_node));
         }
         else
         {
            user_res = backward_transfer(ga, ssa_nid);
         }
      }
      else if(user_kind == gimple_phi_K)
      {
         const auto gp = GetPointerS<const gimple_phi>(user_stmt);
         if(!gp->virtual_flag)
         {
            user_res = get_current_or_best(gp->res);
         }
      }
      else if(user_kind == gimple_return_K)
      {
         const auto gr = GetPointerS<const gimple_return>(user_stmt);
         THROW_ASSERT(gr->op, "ssa id " + STR(ssa_nid) + " used in empty return statement: " + STR(gr));
         const auto res_it = current.find(function_id);
         if(res_it != current.end())
         {
            user_res = res_it->second;
         }
      }
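      // A direct call requires, for this actual argument, the bit-value already
      // attached to the ssa bound to the corresponding formal parameter of the
      // called function.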
      else if(user_kind == gimple_call_K)
      {
         const auto gc = GetPointerS<const gimple_call>(user_stmt);
         const auto call_it = direct_call_id_to_called_id.find(gc->index);
         if(call_it != direct_call_id_to_called_id.end())
         {
            const auto called_id = call_it->second;
            const auto called_tn = TM->CGetTreeNode(called_id);
            const auto called_fd = GetPointerS<const function_decl>(called_tn);

            const auto& actual_parms = gc->args;
            const auto& formal_parms = called_fd->list_of_args;
            THROW_ASSERT(actual_parms.size() == formal_parms.size(), "");
            auto a_it = actual_parms.cbegin();
            const auto a_end = actual_parms.cend();
            auto f_it = formal_parms.cbegin();
            const auto f_end = formal_parms.cend();
            auto found = false;
            for(; a_it != a_end && f_it != f_end; a_it++, f_it++)
            {
               if(GET_INDEX_CONST_NODE(*a_it) == ssa_nid)
               {
                  const auto p_decl_id = AppM->getSSAFromParm(called_id, GET_INDEX_CONST_NODE(*f_it));
                  const auto parmssa = TM->CGetTreeNode(p_decl_id);
                  const auto pd = GetPointerS<const ssa_name>(parmssa);
                  std::deque<bit_lattice> tmp;
                  if(pd->bit_values.empty())
                  {
                     tmp = create_u_bitstring(BitLatticeManipulator::Size(parmssa));
                  }
                  else
                  {
                     tmp = string_to_bitstring(pd->bit_values);
                  }

                  user_res = found ? inf(user_res, tmp, ssa_node) : tmp;
                  found = true;
               }
            }
            THROW_ASSERT(found, STR(ssa) + " is not an actual parameter of function " + STR(called_id));
         }
      }
      else if(user_kind == gimple_cond_K || user_kind == gimple_multi_way_if_K || user_kind == gimple_switch_K ||
              user_kind == gimple_asm_K)
      {
         user_res = create_u_bitstring(BitLatticeManipulator::Size(ssa_node));
      }
      else
      {
         THROW_UNREACHABLE("Unhandled statement: " + STR(user_stmt) + "(" + user_stmt->get_kind_text() + ")");
      }

      if(user_res.size())
      {
         res = inf(res, user_res, ssa_node);
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Analyzed use - " + STR(user_stmt));
      }
      else
      {
         THROW_ASSERT(best.count(ssa_nid), "");
         res = best.at(ssa_nid);
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Analyzed use - " + STR(user_stmt));
         break;
      }
   }
   return res;
}

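// Backward step of the bit-value analysis: statements and phis are visited in
// reverse topological order of the basic blocks, every definition is refined with
// the bit-value required by its uses (backward_chain), and the statements defining
// the operands of each refined definition are rescheduled until a fixed point is
// reached. Function arguments are refined last.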
void Bit_Value::backward()
{
   INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "-->Performing backward transfer");
   std::deque<tree_nodeConstRef> working_list;
   CustomUnorderedSet<unsigned int> working_list_idx;
   const auto push_back = [&](const tree_nodeConstRef& stmt) {
      const auto stmt_kind = stmt->get_kind();
      if(!working_list_idx.count(stmt->index) && (stmt_kind == gimple_assign_K || stmt_kind == gimple_phi_K))
      {
         working_list.push_back(stmt);
         working_list_idx.insert(stmt->index);
      }
   };
   const auto pop_front = [&]() -> tree_nodeConstRef {
      const auto stmt = working_list.front();
      working_list.pop_front();
      working_list_idx.erase(stmt->index);
      return stmt;
   };
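   // Seed the worklist with every assignment and every non-virtual phi handled by
   // the bit-value analysis, scanning basic blocks and statements in reverse order.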
   for(const auto& bb : boost::adaptors::reverse(bb_topological))
   {
      for(const auto& stmt : boost::adaptors::reverse(bb->CGetStmtList()))
      {
         const auto s = GET_CONST_NODE(stmt);
         push_back(s);
         THROW_ASSERT(GetPointer<const gimple_node>(s)->bb_index == bb->number,
                      "BB" + STR(bb->number) + " contains statement from BB" +
                          STR(GetPointer<const gimple_node>(s)->bb_index) + " - " + s->get_kind_text() + " - " +
                          STR(s));
      }
      for(const auto& stmt : boost::adaptors::reverse(bb->CGetPhiList()))
      {
         const auto s = GET_CONST_NODE(stmt);
         const auto gp = GetPointerS<const gimple_phi>(s);
         if(!gp->virtual_flag)
         {
            if(IsHandledByBitvalue(gp->res))
            {
               push_back(s);
               THROW_ASSERT(GetPointer<const gimple_node>(s)->bb_index == bb->number,
                            "BB" + STR(bb->number) + " contains statement from BB" +
                                STR(GetPointer<const gimple_node>(s)->bb_index) + " - " + s->get_kind_text() + " - " +
                                STR(s));
            }
         }
      }
   }
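   // Process the worklist: recompute the bit-value of each definition from its
   // uses and, whenever it changes, reschedule the statements defining its operands.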
   while(!working_list.empty())
   {
      const auto stmt = pop_front();
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "-->Analyzing statement " + STR(stmt));
      const auto stmt_kind = stmt->get_kind();
      tree_nodeConstRef lhs;
      if(stmt_kind == gimple_assign_K)
      {
         lhs = GetPointerS<const gimple_assign>(stmt)->op0;
      }
      else if(stmt_kind == gimple_phi_K)
      {
         lhs = GetPointerS<const gimple_phi>(stmt)->res;
      }
      else
      {
         THROW_UNREACHABLE("Unexpected statement kind: " + stmt->get_kind_text() + " - " + STR(stmt));
      }
      const auto lhs_ssa = GetPointer<const ssa_name>(GET_CONST_NODE(lhs));
      if(lhs_ssa)
      {
         if(!IsHandledByBitvalue(lhs))
         {
            INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                           "<--variable " + STR(lhs) + " of type " + STR(tree_helper::CGetType(lhs)) +
                               " not considered");
            continue;
         }
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "---Propagation for " + lhs_ssa->ToString());
         auto res = backward_chain(lhs);
         if(update_current(res, lhs))
         {
            INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                           "---current updated: " + bitstring_to_string(current.at(lhs->index)));
            std::vector<std::tuple<unsigned int, unsigned int>> vars_read;
            tree_helper::get_required_values(vars_read, TM->GetTreeNode(stmt->index));
            for(const auto& var_pair : vars_read)
            {
               const auto in_ssa_nid = std::get<0>(var_pair);
               if(in_ssa_nid == 0)
               {
                  continue;
               }
               const auto in_ssa = TM->CGetTreeNode(in_ssa_nid);
               if(!IsHandledByBitvalue(in_ssa))
               {
                  continue;
               }
               if(in_ssa->get_kind() == ssa_name_K)
               {
                  const auto ssa_var = GetPointerS<const ssa_name>(in_ssa);
                  const auto nextNode = ssa_var->CGetDefStmt();
                  push_back(GET_CONST_NODE(nextNode));
               }
            }
         }
      }
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Analyzed statement " + STR(stmt));
   }

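   // Finally refine the bit-value of every function argument from the uses of the
   // ssa bound to its parm_decl, unless it has already been proven constant.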
   const auto tn = TM->CGetTreeNode(function_id);
   const auto fd = GetPointerS<const function_decl>(tn);
   for(const auto& parm_decl_node : fd->list_of_args)
   {
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "-->Analyzing argument " + STR(parm_decl_node));
      const auto parmssa_id = AppM->getSSAFromParm(function_id, GET_INDEX_CONST_NODE(parm_decl_node));
      const auto parmssa = TM->CGetTreeReindex(parmssa_id);
      if(!IsHandledByBitvalue(parmssa))
      {
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "<--argument " + STR(parmssa) + " of type " + STR(tree_helper::CGetType(parmssa)) +
                            " not considered id: " + STR(parmssa_id));
         continue;
      }
      auto res = get_current_or_best(parmssa);
      if(bitstring_constant(res))
      {
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "<--argument has been proven to be constant: " + STR(parmssa));
         continue;
      }
      res = backward_chain(parmssa);
      THROW_ASSERT(res.size(), "");
      update_current(res, parmssa);
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Analyzed argument " + STR(parm_decl_node));
   }
   INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "<--Performed backward transfer");
}

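// Given the assignment ga and the node id res_nid of one of its operands, compute
// the bit-value that the assignment requires for that operand, combining the
// bit-value already computed for the lhs with the semantics of the rhs operation.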
std::deque<bit_lattice> Bit_Value::backward_transfer(const gimple_assign* ga, unsigned int res_nid) const
{
   std::deque<bit_lattice> res;
   if(GetPointer<const cst_node>(TM->CGetTreeNode(res_nid)))
   {
      return res;
   }
   THROW_ASSERT(best.count(res_nid), "");
   if(bitstring_constant(best.at(res_nid)))
   {
      THROW_ASSERT(best.count(res_nid), "");
      INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "---backward transfer, skipping constant target");
      return best.at(res_nid);
   }
   const auto& lhs = ga->op0;
   const auto lhs_nid = GET_INDEX_CONST_NODE(lhs);
   const auto lhs_size = BitLatticeManipulator::Size(lhs);
   const auto lhs_kind = GET_CONST_NODE(lhs)->get_kind();

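   // When the lhs is a memory access the queried operand may appear as an address
   // or index on the left-hand side; those uses are handled by this switch. For an
   // ssa_name lhs the analysis falls through to the switch on the rhs expression.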
   switch(lhs_kind)
   {
      case array_ref_K:
      {
         auto operation = GetPointerS<const array_ref>(GET_CONST_NODE(ga->op0));
         do
         {
            if(GET_INDEX_NODE(operation->op1) == res_nid && current.count(GET_INDEX_CONST_NODE(operation->op1)))
            {
               INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "---LHS equals operand");
               return create_u_bitstring(pointer_resizing(GET_INDEX_CONST_NODE(operation->op0)));
            }
            operation = GetPointer<const array_ref>(GET_CONST_NODE(operation->op0));
         } while(operation);
         return res;
      }
      case mem_ref_K:
      {
         const auto operation = GetPointerS<const mem_ref>(GET_CONST_NODE(ga->op0));
         if(GET_INDEX_CONST_NODE(operation->op0) == res_nid && current.count(GET_INDEX_CONST_NODE(operation->op0)))
         {
            INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level, "---LHS equals operand");
            return create_u_bitstring(pointer_resizing(res_nid));
         }
         return res;
      }
      case ssa_name_K:
      {
         break;
      }
      case assert_expr_K:
      case bit_and_expr_K:
      case bit_ior_expr_K:
      case bit_xor_expr_K:
      case catch_expr_K:
      case ceil_div_expr_K:
      case ceil_mod_expr_K:
      case complex_expr_K:
      case compound_expr_K:
      case eh_filter_expr_K:
      case eq_expr_K:
      case exact_div_expr_K:
      case fdesc_expr_K:
      case floor_div_expr_K:
      case floor_mod_expr_K:
      case ge_expr_K:
      case gt_expr_K:
      case goto_subroutine_K:
      case in_expr_K:
      case init_expr_K:
      case le_expr_K:
      case lrotate_expr_K:
      case lshift_expr_K:
      case lt_expr_K:
      case max_expr_K:
      case min_expr_K:
      case minus_expr_K:
      case modify_expr_K:
      case mult_expr_K:
      case mult_highpart_expr_K:
      case ne_expr_K:
      case ordered_expr_K:
      case plus_expr_K:
      case pointer_plus_expr_K:
      case postdecrement_expr_K:
      case postincrement_expr_K:
      case predecrement_expr_K:
      case preincrement_expr_K:
      case range_expr_K:
      case rdiv_expr_K:
      case frem_expr_K:
      case round_div_expr_K:
      case round_mod_expr_K:
      case rrotate_expr_K:
      case rshift_expr_K:
      case set_le_expr_K:
      case trunc_div_expr_K:
      case trunc_mod_expr_K:
      case truth_and_expr_K:
      case truth_andif_expr_K:
      case truth_or_expr_K:
      case truth_orif_expr_K:
      case truth_xor_expr_K:
      case try_catch_expr_K:
      case try_finally_K:
      case uneq_expr_K:
      case ltgt_expr_K:
      case unge_expr_K:
      case ungt_expr_K:
      case unle_expr_K:
      case unlt_expr_K:
      case unordered_expr_K:
      case widen_sum_expr_K:
      case widen_mult_expr_K:
      case with_size_expr_K:
      case vec_lshift_expr_K:
      case vec_rshift_expr_K:
      case widen_mult_hi_expr_K:
      case widen_mult_lo_expr_K:
      case vec_pack_trunc_expr_K:
      case vec_pack_sat_expr_K:
      case vec_pack_fix_trunc_expr_K:
      case vec_extracteven_expr_K:
      case vec_extractodd_expr_K:
      case vec_interleavehigh_expr_K:
      case vec_interleavelow_expr_K:
      case extract_bit_expr_K:
      case sat_plus_expr_K:
      case sat_minus_expr_K:
      case array_range_ref_K:
      case CASE_TYPE_NODES:
      case CASE_PRAGMA_NODES:
      case CASE_FAKE_NODES:
      case CASE_CPP_NODES:
      case CASE_DECL_NODES:
      case CASE_CST_NODES:
      case CASE_GIMPLE_NODES:
      case aggr_init_expr_K:
      case binfo_K:
      case block_K:
      case call_expr_K:
      case case_label_expr_K:
      case constructor_K:
      case error_mark_K:
      case identifier_node_K:
      case lut_expr_K:
      case statement_list_K:
      case target_expr_K:
      case target_mem_ref_K:
      case target_mem_ref461_K:
      case tree_list_K:
      case tree_vec_K:
      case extractvalue_expr_K:
      case extractelement_expr_K:
      default:
      {
         THROW_UNREACHABLE("Unhandled lhs expression: " + ga->ToString() + " (" + tree_node::GetString(lhs_kind) + ")");
         break;
      }
   }

   const auto lhs_bitstring = get_current_or_best(lhs);
   INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                  "---backward_transfer, lhs: " + bitstring_to_string(lhs_bitstring));
   const auto& rhs = ga->op1;
   const auto rhs_kind = GET_CONST_NODE(rhs)->get_kind();
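   // Dispatch on the rhs operation: each handled operator reconstructs, from the
   // bits required on the lhs, the bits actually required on the queried operand.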
   switch(rhs_kind)
   {
      // Unary expressions
      case addr_expr_K:
      case bit_not_expr_K:
      case convert_expr_K:
      case negate_expr_K:
      case nop_expr_K:
      case truth_not_expr_K:
      case view_convert_expr_K:
      {
         const auto operation = GetPointerS<const unary_expr>(GET_CONST_NODE(rhs));

         const auto op_nid = GET_INDEX_NODE(operation->op);
         THROW_ASSERT(res_nid == op_nid, "Invalid operand: " + STR(res_nid) + " (" + ga->ToString() + ")");
         if(!IsHandledByBitvalue(operation->op))
         {
            break;
         }
         THROW_ASSERT(best.count(op_nid), "");
         auto op_bitstring = best.at(op_nid);
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand(" + STR(op_nid) + "): " + bitstring_to_string(op_bitstring));

         if(rhs_kind == addr_expr_K)
         {
            if(ga->temporary_address)
            {
               const auto op_kind = GET_CONST_NODE(operation->op)->get_kind();
               if(op_kind == mem_ref_K)
               {
                  const auto mr = GetPointerS<const mem_ref>(GET_CONST_NODE(operation->op));
                  if(GET_INDEX_CONST_NODE(mr->op0) == res_nid && current.count(GET_INDEX_CONST_NODE(mr->op0)))
                  {
                     return create_u_bitstring(pointer_resizing(lhs_nid));
                  }
               }
               else if(op_kind == target_mem_ref461_K)
               {
                  const auto tmr = GetPointerS<const target_mem_ref461>(GET_CONST_NODE(operation->op));

                  if(GET_INDEX_CONST_NODE(tmr->base) == res_nid && current.count(GET_INDEX_CONST_NODE(tmr->base)))
                  {
                     return create_u_bitstring(pointer_resizing(lhs_nid));
                  }
                  else if(tmr->idx2 && GET_INDEX_CONST_NODE(tmr->idx2) == res_nid &&
                          current.count(GET_INDEX_CONST_NODE(tmr->idx2)))
                  {
                     return create_u_bitstring(pointer_resizing(lhs_nid));
                  }
               }
               else if(op_kind == array_ref_K)
               {
                  auto ar = GetPointerS<const array_ref>(GET_CONST_NODE(operation->op));
                  do
                  {
                     if(GET_INDEX_CONST_NODE(ar->op1) == res_nid && current.count(GET_INDEX_CONST_NODE(ar->op1)))
                     {
                        return create_u_bitstring(pointer_resizing(GET_INDEX_CONST_NODE(ar->op0)));
                     }
                     ar = GetPointer<const array_ref>(GET_CONST_NODE(ar->op0));
                  } while(ar);
               }
            }
         }
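         // bit_not_expr: each bit required on the lhs maps one-to-one onto the same
         // bit of the operand; positions the lhs does not care about (X) stay X.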
         else if(rhs_kind == bit_not_expr_K)
         {
            const auto lhs_bitsize = lhs_bitstring.size();

            auto se_lhs_bitstring = lhs_bitstring;
            const auto initial_size = op_bitstring.size();
            if(initial_size < lhs_bitsize)
            {
               op_bitstring = sign_extend_bitstring(op_bitstring, IsSignedIntegerType(operation->op), lhs_bitsize);
            }
            if(initial_size > lhs_bitsize)
            {
               se_lhs_bitstring = sign_extend_bitstring(lhs_bitstring, IsSignedIntegerType(lhs), initial_size);
            }

            auto it_lhs_bitstring = se_lhs_bitstring.rbegin();
            auto it_op_bitstring = op_bitstring.rbegin();
            for(; it_lhs_bitstring != se_lhs_bitstring.rend() && it_op_bitstring != op_bitstring.rend();
                ++it_lhs_bitstring, ++it_op_bitstring)
            {
               if(*it_lhs_bitstring == bit_lattice::X)
               {
                  res.push_front(bit_lattice::X);
               }
               else
               {
                  res.push_front(*it_op_bitstring);
               }
            }
         }
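         // convert/nop/view_convert: the lhs requirement is resized to the operand
         // width; for narrowing casts the dropped high bits are folded into the new
         // sign position. Signed-to-unsigned casts are skipped, see the warning below.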
         else if(rhs_kind == convert_expr_K || rhs_kind == nop_expr_K || rhs_kind == view_convert_expr_K)
         {
            const bool lhs_signed = IsSignedIntegerType(lhs);
            const bool op_signed = IsSignedIntegerType(operation->op);
            const auto op_size = BitLatticeManipulator::Size(operation->op);
            if(op_signed && !lhs_signed)
            {
               /*
                * ###################################################################
                * WARNING!! do not remove this condition!
                * the backward propagation of casts cannot be performed when the lhs
                * is unsigned and the rhs is signed.
                * the reason is that bitstrings attached to unsigned do not carry
                * around implicit information on sign bits. on the contrary,
                * bitstrings attached to signed carry this kind of implicit
                * information. if we propagate back from unsigned to signed there are
                * some corner cases when the sign bit information on the rhs would be
                * overwritten by dontcares in the lhs unsigned string, leading to
                * nasty propagations bugs, that occur rarely and only in very complex
                * tests, and are very hard to track down.
                * ###################################################################
                */
               break;
            }

            res = lhs_bitstring;
            if(res.size() < lhs_size)
            {
               res = sign_extend_bitstring(res, lhs_signed, lhs_size);
            }
            if(lhs_size > op_size)
            {
               if(op_size < 32 && op_size > 1)
               {
                  const auto sign_bit = res.front();
                  res.pop_front();
                  while(res.size() > op_size)
                  {
                     res.pop_front();
                  }
                  res.front() = bit_inf(sign_bit, res.front());
               }
               else
               {
                  while(res.size() > op_size)
                  {
                     res.pop_front();
                  }
               }
            }
         }
         else if(rhs_kind == negate_expr_K)
         {
            res = op_bitstring;
            const auto initial_size = op_bitstring.size();
            auto res_size = BitLatticeManipulator::size(TM, res_nid);
            if(lhs_bitstring.front() == bit_lattice::U)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size()));
            }
            if(lhs_bitstring.front() == bit_lattice::X)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size() - 1));
            }
            while(res.size() > res_size)
            {
               res.pop_front();
            }
            if(res.size() != initial_size)
            {
               res.push_front(bit_lattice::X);
            }
         }
         else if(rhs_kind == truth_not_expr_K)
         {
            res = op_bitstring;
            while(res.size() > 1)
            {
               res.pop_front();
            }
         }
         else
         {
            THROW_UNREACHABLE("Unhandled unary expression: " + ga->ToString() + "(" + tree_node::GetString(rhs_kind) +
                              ")");
         }
         break;
      }
      // Binary expressions
      case bit_and_expr_K:
      case bit_ior_expr_K:
      case bit_xor_expr_K:
      case extract_bit_expr_K:
      case lrotate_expr_K:
      case lshift_expr_K:
      case mem_ref_K:
      case minus_expr_K:
      case mult_expr_K:
      case plus_expr_K:
      case pointer_plus_expr_K:
      case rrotate_expr_K:
      case rshift_expr_K:
      case truth_and_expr_K:
      case truth_andif_expr_K:
      case truth_or_expr_K:
      case truth_orif_expr_K:
      case truth_xor_expr_K:
      case widen_mult_expr_K:
      {
         const auto operation = GetPointer<const binary_expr>(GET_CONST_NODE(rhs));

         auto op0_nid = GET_INDEX_NODE(operation->op0);
         THROW_ASSERT(best.count(op0_nid), "");
         auto op0_bitstring = best.at(op0_nid);

         auto op1_nid = GET_INDEX_NODE(operation->op1);
         THROW_ASSERT(best.count(op1_nid), "");
         auto op1_bitstring = best.at(op1_nid);

         THROW_ASSERT(res_nid == op0_nid || res_nid == op1_nid,
                      "Invalid operand: " + STR(res_nid) + " (" + ga->ToString() + ")");
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand0(" + STR(op0_nid) + "): " + bitstring_to_string(op0_bitstring));
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand1(" + STR(op1_nid) + "): " + bitstring_to_string(op1_bitstring));

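         // bit_and/bit_ior/bit_xor: a bit of the queried operand is required only
         // where the lhs requires it and the other operand does not already force
         // the result (0 for and, 1 for or); everywhere else it becomes X.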
         if(rhs_kind == bit_and_expr_K || rhs_kind == bit_ior_expr_K || rhs_kind == bit_xor_expr_K)
         {
            const auto lhs_bitsize = lhs_bitstring.size();

            if(op0_nid != res_nid)
            {
               std::swap(op0_nid, op1_nid);
               std::swap(op0_bitstring, op1_bitstring);
            }

            auto se_lhs_bitstring = lhs_bitstring;
            const auto initial_size = op0_bitstring.size();
            if(initial_size < lhs_bitsize)
            {
               op0_bitstring = sign_extend_bitstring(op0_bitstring, IsSignedIntegerType(operation->op0), lhs_bitsize);
            }
            if(initial_size > lhs_bitsize)
            {
               se_lhs_bitstring = sign_extend_bitstring(lhs_bitstring, IsSignedIntegerType(lhs), initial_size);
            }
            if(op0_bitstring.size() > op1_bitstring.size())
            {
               op1_bitstring =
                   sign_extend_bitstring(op1_bitstring, IsSignedIntegerType(operation->op1), op0_bitstring.size());
            }
            if(op1_bitstring.size() > op0_bitstring.size())
            {
               op0_bitstring =
                   sign_extend_bitstring(op0_bitstring, IsSignedIntegerType(operation->op0), op1_bitstring.size());
            }

            auto it_lhs_bitstring = se_lhs_bitstring.rbegin();
            auto it_op0_bitstring = op0_bitstring.rbegin();
            auto it_op1_bitstring = op1_bitstring.rbegin();
            for(; it_lhs_bitstring != se_lhs_bitstring.rend() && it_op0_bitstring != op0_bitstring.rend();
                ++it_lhs_bitstring, ++it_op0_bitstring, ++it_op1_bitstring)
            {
               if(*it_lhs_bitstring == bit_lattice::X)
               {
                  res.push_front(bit_lattice::X);
               }
               else if(rhs_kind == bit_and_expr_K && *it_op0_bitstring != bit_lattice::ZERO &&
                       *it_op1_bitstring == bit_lattice::ZERO)
               {
                  res.push_front(bit_lattice::X);
               }
               else if(rhs_kind == bit_ior_expr_K && *it_op0_bitstring != bit_lattice::ONE &&
                       *it_op1_bitstring == bit_lattice::ONE)
               {
                  res.push_front(bit_lattice::X);
               }
               else
               {
                  res.push_front(*it_op0_bitstring);
               }
            }
         }
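         // Rotations (and the variable shifts below): when the queried operand is
         // the rotate/shift amount only its low ceil_log2(lhs_size) bits matter;
         // all higher bits become X, except the bit right above that field, which
         // is forced to 0 when the amount is signed.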
         else if(rhs_kind == lrotate_expr_K || rhs_kind == rrotate_expr_K)
         {
            if(op1_nid == res_nid)
            {
               const auto op_signed_p = tree_helper::is_int(TM, res_nid);
               unsigned int log2;
               for(log2 = 1; lhs_size > (1u << log2); ++log2)
               {
                  ;
               }
               res = op1_bitstring;
               for(auto index = 0u; res.size() > index + log2; ++index)
               {
                  if(op_signed_p && (res.size() == index + log2 + 1))
                  {
                     res[index] = bit_lattice::ZERO;
                  }
                  else
                  {
                     res[index] = bit_lattice::X;
                  }
               }
            }
         }
         else if(rhs_kind == lshift_expr_K)
         {
            if(GET_CONST_NODE(operation->op1)->get_kind() != integer_cst_K)
            {
               if(op1_nid == res_nid)
               {
                  const auto op_signed_p = tree_helper::is_int(TM, res_nid);
                  unsigned int log2;
                  for(log2 = 1; lhs_size > (1u << log2); ++log2)
                  {
                     ;
                  }
                  res = op1_bitstring;
                  for(auto index = 0u; res.size() > index + log2; ++index)
                  {
                     if(op_signed_p && (res.size() == index + log2 + 1))
                     {
                        res[index] = bit_lattice::ZERO;
                     }
                     else
                     {
                        res[index] = bit_lattice::X;
                     }
                  }
               }
               break;
            }

            const auto cst_val = tree_helper::GetConstValue(operation->op1);
            if(cst_val < 0)
            {
               res.push_back(bit_lattice::X);
               break;
            }

            res = lhs_bitstring;
            while(res.size() > (lhs_bitstring.size() - static_cast<size_t>(cst_val)))
            {
               res.pop_back();
            }
            if(res.size() < lhs_bitstring.size())
            {
               res = sign_extend_bitstring(res, tree_helper::is_int(TM, res_nid), lhs_bitstring.size());
            }
         }
         else if(rhs_kind == mem_ref_K)
         {
            if(op0_nid == res_nid)
            {
               res = create_u_bitstring(pointer_resizing(res_nid));
            }
         }
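         // plus/minus/mult/pointer_plus/widen_mult: the operand only needs as many
         // low bits as the lhs actually consumes, so its bitstring is truncated to
         // that length (an X is prepended when something has been dropped).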
         else if(rhs_kind == minus_expr_K || rhs_kind == mult_expr_K || rhs_kind == plus_expr_K ||
                 rhs_kind == pointer_plus_expr_K || rhs_kind == widen_mult_expr_K)
         {
            if(op0_nid != res_nid)
            {
               std::swap(op0_nid, op1_nid);
               std::swap(op0_bitstring, op1_bitstring);
            }

            res = op0_bitstring;
            const auto initial_size = op0_bitstring.size();
            auto res_size = BitLatticeManipulator::size(TM, res_nid);
            if(lhs_bitstring.front() == bit_lattice::U)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size()));
            }
            if(lhs_bitstring.front() == bit_lattice::X)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size() - 1));
            }
            while(res.size() > res_size)
            {
               res.pop_front();
            }
            if(res.size() != initial_size)
            {
               res.push_front(bit_lattice::X);
            }
         }
         else if(rhs_kind == rshift_expr_K)
         {
            if(GET_CONST_NODE(operation->op1)->get_kind() != integer_cst_K)
            {
               if(op1_nid == res_nid)
               {
                  const auto op_signed_p = tree_helper::is_int(TM, res_nid);
                  unsigned int log2;
                  for(log2 = 1; lhs_size > (1u << log2); ++log2)
                  {
                     ;
                  }
                  res = op1_bitstring;
                  for(auto index = 0u; res.size() > index + log2; ++index)
                  {
                     if(op_signed_p && (res.size() == index + log2 + 1))
                     {
                        res[index] = bit_lattice::ZERO;
                     }
                     else
                     {
                        res[index] = bit_lattice::X;
                     }
                  }
               }
               break;
            }

            const auto cst_val = tree_helper::GetConstValue(operation->op1);
            if(cst_val < 0)
            {
               res.push_back(bit_lattice::X);
               break;
            }

            res = lhs_bitstring;
            const auto shift_value = static_cast<unsigned long long>(cst_val);
            for(auto shift_value_it = 0u; shift_value_it < shift_value; shift_value_it++)
            {
               res.push_back(bit_lattice::X);
            }

            const auto shifted_type_size = BitLatticeManipulator::Size(operation->op0);
            while(res.size() > shifted_type_size)
            {
               res.pop_front();
            }
            if(tree_helper::IsSignedIntegerType(operation->op0) && (lhs_bitstring.size() + shift_value) > lhs_size)
            {
               const auto lhs_sign_extend_end =
                   lhs_bitstring.begin() +
                   static_cast<decltype(lhs_bitstring)::difference_type>(lhs_bitstring.size() + shift_value - lhs_size);
               if(std::find(lhs_bitstring.begin(), lhs_sign_extend_end, bit_lattice::U) != lhs_sign_extend_end)
               {
                  res.front() = bit_lattice::U;
               }
            }
         }
         else if(rhs_kind == truth_and_expr_K || rhs_kind == truth_andif_expr_K || rhs_kind == truth_or_expr_K ||
                 rhs_kind == truth_orif_expr_K || rhs_kind == truth_xor_expr_K)
         {
            if(op0_nid != res_nid)
            {
               std::swap(op0_nid, op1_nid);
               std::swap(op0_bitstring, op1_bitstring);
            }
            res = op0_bitstring;
            while(res.size() > 1)
            {
               res.pop_front();
            }
         }
         else if(rhs_kind == extract_bit_expr_K)
         {
            if(op1_nid == res_nid)
            {
               THROW_ERROR("unexpected condition");
               break;
            }

            const auto cst_val = tree_helper::GetConstValue(operation->op1);
            THROW_ASSERT(cst_val >= 0, "unexpected condition");
            THROW_ASSERT(lhs_bitstring.size() == 1, "unexpected condition - " + bitstring_to_string(lhs_bitstring));

            res = lhs_bitstring;
            const auto shift_value = static_cast<unsigned long long>(cst_val);
            for(auto shift_value_it = 0u; shift_value_it < shift_value; shift_value_it++)
            {
               res.push_back(bit_lattice::X);
            }
            const auto shifted_type_size = BitLatticeManipulator::Size(operation->op0);
            while(res.size() < shifted_type_size)
            {
               res.push_front(bit_lattice::X);
            }
            while(res.size() > shifted_type_size)
            {
               res.pop_front();
            }
         }
         else
         {
            THROW_UNREACHABLE("Unhandled binary expression: " + ga->ToString() + "(" + tree_node::GetString(rhs_kind) +
                              ")");
         }
         break;
      }
      // Ternary expressions
      case bit_ior_concat_expr_K:
      case cond_expr_K:
      case fshl_expr_K:
      case fshr_expr_K:
      case ternary_plus_expr_K:
      case ternary_pm_expr_K:
      case ternary_mp_expr_K:
      case ternary_mm_expr_K:
      {
         const auto operation = GetPointer<const ternary_expr>(GET_CONST_NODE(rhs));

         auto op0_nid = GET_INDEX_NODE(operation->op0);
         THROW_ASSERT(best.count(op0_nid), "");
         auto op0_bitstring = best.at(op0_nid);

         auto op1_nid = GET_INDEX_NODE(operation->op1);
         THROW_ASSERT(best.count(op1_nid), "");
         auto op1_bitstring = best.at(op1_nid);

         auto op2_nid = GET_INDEX_NODE(operation->op2);
         THROW_ASSERT(best.count(op2_nid), "");
         auto op2_bitstring = best.at(op2_nid);

         THROW_ASSERT(res_nid == op0_nid || res_nid == op1_nid || res_nid == op2_nid,
                      "Invalid operand: " + STR(res_nid) + " (" + ga->ToString() + ")");
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand0(" + STR(op0_nid) + "): " + bitstring_to_string(op0_bitstring));
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand1(" + STR(op1_nid) + "): " + bitstring_to_string(op1_bitstring));
         INDENT_DBG_MEX(DEBUG_LEVEL_VERY_PEDANTIC, debug_level,
                        "--- operand2(" + STR(op2_nid) + "): " + bitstring_to_string(op2_bitstring));

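         // bit_ior_concat_expr: the lhs requirement is split at the concatenation
         // offset; the low offset bits are what the low operand needs, the remaining
         // high bits are what the other operand needs (the offset itself, operand 2,
         // is never refined here).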
         if(rhs_kind == bit_ior_concat_expr_K)
         {
            if(op2_nid == res_nid)
            {
               break;
            }
            const auto lhs_bitsize = lhs_bitstring.size();

            if(op0_nid == res_nid)
            {
               op1_nid = 0;
            }
            if(op1_nid == res_nid)
            {
               op0_nid = op1_nid;
               op0_bitstring = op1_bitstring;
            }

            const auto offset = tree_helper::GetConstValue(operation->op2);
            const auto initial_size = op0_bitstring.size();
            auto se_lhs_bitstring = lhs_bitstring;
            if(initial_size < lhs_bitsize)
            {
               op0_bitstring = sign_extend_bitstring(op0_bitstring, IsSignedIntegerType(operation->op0), lhs_bitsize);
            }
            if(initial_size > lhs_bitsize)
            {
               se_lhs_bitstring = sign_extend_bitstring(lhs_bitstring, IsSignedIntegerType(lhs), initial_size);
            }

            auto it_lhs_bitstring = se_lhs_bitstring.rbegin();
            auto it_op0_bitstring = op0_bitstring.rbegin();
            integer_cst_t index = 0;
            if(op1_nid)
            {
               for(; it_lhs_bitstring != se_lhs_bitstring.rend() && it_op0_bitstring != op0_bitstring.rend() &&
                     index < offset;
                   ++it_lhs_bitstring, ++it_op0_bitstring, ++index)
               {
                  res.push_front(*it_lhs_bitstring);
               }
               if(IsSignedIntegerType(operation->op1))
               {
                  res.push_front(bit_lattice::X);
               }
            }
            else
            {
               for(; it_lhs_bitstring != se_lhs_bitstring.rend() && it_op0_bitstring != op0_bitstring.rend();
                   ++it_lhs_bitstring, ++it_op0_bitstring, ++index)
               {
                  if(index < offset)
                  {
                     res.push_front(bit_lattice::ZERO);
                  }
                  else
                  {
                     res.push_front(*it_lhs_bitstring);
                  }
               }
            }
         }
         else if(rhs_kind == cond_expr_K)
         {
            if(op0_nid == res_nid)
            {
               break;
            }

            if(op1_nid != res_nid)
            {
               std::swap(op1_nid, op2_nid);
               std::swap(op1_bitstring, op2_bitstring);
            }

            auto it_lhs_bitstring = lhs_bitstring.rbegin();
            auto it_op1_bitstring = op1_bitstring.rbegin();
            for(; it_lhs_bitstring != lhs_bitstring.rend() && it_op1_bitstring != op1_bitstring.rend();
                ++it_lhs_bitstring, ++it_op1_bitstring)
            {
               if(*it_lhs_bitstring == bit_lattice::X)
               {
                  res.push_front(bit_lattice::X);
               }
               else
               {
                  res.push_front(*it_op1_bitstring);
               }
            }
            if(res.front() == bit_lattice::X && op1_bitstring.size() < lhs_bitstring.size())
            {
               const auto arg1_sign = op1_bitstring.front();
               res.pop_front();
               res.push_front(arg1_sign);
            }
         }
         else if(rhs_kind == fshl_expr_K || rhs_kind == fshr_expr_K)
         {
            if(GET_CONST_NODE(operation->op2)->get_kind() != integer_cst_K)
            {
               if(op2_nid == res_nid)
               {
                  res = create_u_bitstring(static_cast<size_t>(ceil_log2(lhs_size)));
               }
               break;
            }
            if(op0_nid == op1_nid)
            {
               res = create_u_bitstring(lhs_size);
               break;
            }

            const auto offset = static_cast<size_t>(tree_helper::GetConstValue(operation->op2)) % lhs_size;
            if(op0_nid == res_nid)
            {
               res = create_u_bitstring(static_cast<size_t>(lhs_size - offset));
            }
            else
            {
               THROW_ASSERT(op1_nid == res_nid, "");
               res = create_u_bitstring(offset);
               res.insert(res.end(), static_cast<size_t>(lhs_size - offset), bit_lattice::X);
            }
         }
         else if(rhs_kind == ternary_plus_expr_K || rhs_kind == ternary_pm_expr_K || rhs_kind == ternary_mp_expr_K ||
                 rhs_kind == ternary_mm_expr_K)
         {
            if(op0_nid == res_nid)
            {
               res = op0_bitstring;
            }
            if(op1_nid == res_nid)
            {
               res = op1_bitstring;
            }
            if(op2_nid == res_nid)
            {
               res = op2_bitstring;
            }
            const auto initial_size = res.size();
            auto res_size = BitLatticeManipulator::size(TM, res_nid);
            if(lhs_bitstring.front() == bit_lattice::U)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size()));
            }
            if(lhs_bitstring.front() == bit_lattice::X)
            {
               res_size = std::min(res_size, static_cast<unsigned long long>(lhs_bitstring.size() - 1));
            }
            while(res.size() > res_size)
            {
               res.pop_front();
            }
            if(res.size() != initial_size)
            {
               res.push_front(bit_lattice::X);
            }
         }
         else
         {
            THROW_UNREACHABLE("Unhandled ternary expression: " + ga->ToString() + "(" + tree_node::GetString(rhs_kind) +
                              ")");
         }
         break;
      }
      case array_ref_K:
      {
         auto operation = GetPointerS<const array_ref>(GET_CONST_NODE(rhs));
         do
         {
            if(GET_INDEX_CONST_NODE(operation->op1) == res_nid)
            {
               return create_u_bitstring(pointer_resizing(GET_INDEX_CONST_NODE(operation->op0)));
            }
            operation = GetPointer<const array_ref>(GET_CONST_NODE(operation->op0));
         } while(operation);
         break;
      }
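      // A call whose return value is assigned behaves like gimple_call in
      // backward_chain: the queried actual argument takes the bit-value of the
      // corresponding formal parameter of the called function.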
      case aggr_init_expr_K:
      case call_expr_K:
      {
         const auto call = GetPointerS<const call_expr>(GET_CONST_NODE(rhs));
         const auto call_it = direct_call_id_to_called_id.find(ga->index);
         if(call_it != direct_call_id_to_called_id.end())
         {
            const auto called_id = call_it->second;
            const auto tn = TM->CGetTreeNode(called_id);
            const auto fd = GetPointerS<const function_decl>(tn);

            const auto actual_parms = call->args;
            const auto formal_parms = fd->list_of_args;
            THROW_ASSERT(actual_parms.size() == formal_parms.size(), "");
            auto a_it = actual_parms.cbegin();
            auto a_end = actual_parms.cend();
            auto f_it = formal_parms.cbegin();
            auto f_end = formal_parms.cend();
            bool found = actual_parms.empty();
            for(; a_it != a_end && f_it != f_end; a_it++, f_it++)
            {
               if(GET_INDEX_CONST_NODE(*a_it) == res_nid)
               {
                  const auto p_decl_id = AppM->getSSAFromParm(called_id, GET_INDEX_CONST_NODE(*f_it));
                  const auto parmssa = TM->CGetTreeNode(p_decl_id);
                  const auto pd = GetPointerS<const ssa_name>(parmssa);
                  std::deque<bit_lattice> tmp;
                  if(pd->bit_values.empty())
                  {
                     tmp = create_u_bitstring(BitLatticeManipulator::Size(parmssa));
                  }
                  else
                  {
                     tmp = string_to_bitstring(pd->bit_values);
                  }

                  res = found ? inf(res, tmp, res_nid) : tmp;
                  found = true;
               }
            }
            THROW_ASSERT(found, STR(res_nid) + " is not an actual parameter of function " + STR(called_id));
         }
         break;
      }
      case ssa_name_K:
      {
         THROW_ASSERT(best.count(GET_INDEX_CONST_NODE(rhs)), "");
         res = best.at(GET_INDEX_CONST_NODE(rhs));
         break;
      }
      // Unary expressions
      case abs_expr_K:
      case alignof_expr_K:
      // Binary expressions
      case eq_expr_K:
      case exact_div_expr_K:
      case ge_expr_K:
      case gt_expr_K:
      case le_expr_K:
      case lt_expr_K:
      case ltgt_expr_K:
      case max_expr_K:
      case min_expr_K:
      case ne_expr_K:
      case ordered_expr_K:
      case sat_minus_expr_K:
      case sat_plus_expr_K:
      case trunc_div_expr_K:
      case trunc_mod_expr_K:
      case uneq_expr_K:
      case unge_expr_K:
      case ungt_expr_K:
      case unle_expr_K:
      case unlt_expr_K:
      case unordered_expr_K:
      case extractvalue_expr_K:
      case extractelement_expr_K:
      // Ternary expressions
      case lut_expr_K:
      case insertvalue_expr_K:
      case insertelement_expr_K:
      {
         // Do nothing
         break;
      }
      // Unary expressions
      case arrow_expr_K:
      case buffer_ref_K:
      case card_expr_K:
      case cleanup_point_expr_K:
      case conj_expr_K:
      case exit_expr_K:
      case fix_ceil_expr_K:
      case fix_floor_expr_K:
      case fix_round_expr_K:
      case fix_trunc_expr_K:
      case float_expr_K:
      case imagpart_expr_K:
      case indirect_ref_K:
      case misaligned_indirect_ref_K:
      case loop_expr_K:
      case non_lvalue_expr_K:
      case paren_expr_K:
      case realpart_expr_K:
      case reference_expr_K:
      case reinterpret_cast_expr_K:
      case sizeof_expr_K:
      case static_cast_expr_K:
      case throw_expr_K:
      case unsave_expr_K:
      case va_arg_expr_K:
      case reduc_max_expr_K:
      case reduc_min_expr_K:
      case reduc_plus_expr_K:
      case vec_unpack_hi_expr_K:
      case vec_unpack_lo_expr_K:
      case vec_unpack_float_hi_expr_K:
      case vec_unpack_float_lo_expr_K:
      // Binary expressions
      case assert_expr_K:
      case catch_expr_K:
      case ceil_div_expr_K:
      case ceil_mod_expr_K:
      case complex_expr_K:
      case compound_expr_K:
      case eh_filter_expr_K:
      case fdesc_expr_K:
      case floor_div_expr_K:
      case floor_mod_expr_K:
      case goto_subroutine_K:
      case in_expr_K:
      case init_expr_K:
      case modify_expr_K:
      case mult_highpart_expr_K:
      case postdecrement_expr_K:
      case postincrement_expr_K:
      case predecrement_expr_K:
      case preincrement_expr_K:
      case range_expr_K:
      case rdiv_expr_K:
      case frem_expr_K:
      case round_div_expr_K:
      case round_mod_expr_K:
      case set_le_expr_K:
      case try_catch_expr_K:
      case try_finally_K:
      case widen_sum_expr_K:
      case with_size_expr_K:
      case vec_lshift_expr_K:
      case vec_rshift_expr_K:
      case widen_mult_hi_expr_K:
      case widen_mult_lo_expr_K:
      case vec_pack_trunc_expr_K:
      case vec_pack_sat_expr_K:
      case vec_pack_fix_trunc_expr_K:
      case vec_extracteven_expr_K:
      case vec_extractodd_expr_K:
      case vec_interleavehigh_expr_K:
      case vec_interleavelow_expr_K:
      // Ternary expressions
      case component_ref_K:
      case bit_field_ref_K:
      case vtable_ref_K:
      case with_cleanup_expr_K:
      case obj_type_ref_K:
      case save_expr_K:
      case vec_cond_expr_K:
      case vec_perm_expr_K:
      case dot_prod_expr_K:
      // Quaternary expressions
      case array_range_ref_K:
      case CASE_TYPE_NODES:
      case CASE_PRAGMA_NODES:
      case CASE_FAKE_NODES:
      case CASE_CPP_NODES:
      case CASE_DECL_NODES:
      case CASE_CST_NODES:
      case CASE_GIMPLE_NODES:
      case binfo_K:
      case block_K:
      case case_label_expr_K:
      case constructor_K:
      case error_mark_K:
      case identifier_node_K:
      case statement_list_K:
      case target_expr_K:
      case target_mem_ref_K:
      case target_mem_ref461_K:
      case tree_list_K:
      case tree_vec_K:
      default:
      {
         THROW_UNREACHABLE("Unhandled rhs expression: " + ga->ToString() + " (" + tree_node::GetString(rhs_kind) + ")");
         break;
      }
   }
   // TODO: this is because IrLowering is not doing its job, better fix it and remove this
   const auto res_size = BitLatticeManipulator::Size(TM->CGetTreeNode(res_nid));
   while(res.size() > res_size)
   {
      res.pop_front();
   }
   return res;
}