11972 resync smatch


 114  * may be constant, and not actually need this block at all.
 115  */
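/*
 * Editor's illustration (not part of the original file): the pattern
 * try_to_simplify_bb() targets is a conditional branch on a phi whose
 * sources are constants, e.g.
 *
 *	int f(int p)
 *	{
 *		int c = p ? 1 : 0;	// phi of the constants 1 and 0
 *		if (c)			// OP_CBR on the phi result
 *			return g();
 *		return h();
 *	}
 *
 * each predecessor feeding a known truth value can then branch straight
 * to the matching target of the second branch, bypassing this block.
 * (g() and h() are hypothetical helpers used only for this sketch.)
 */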
 116 static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
 117 {
 118         int changed = 0;
 119         pseudo_t phi;
 120         int bogus;
 121 
 122         /*
 123          * This is due to improper dominance tracking during
 124          * simplify_symbol_usage()/conversion to SSA form.
 125          * No sane simplification can be done when we have this.
 126          */
 127         bogus = bb_list_size(bb->parents) != pseudo_list_size(first->phi_list);
 128 
 129         FOR_EACH_PTR(first->phi_list, phi) {
 130                 struct instruction *def = phi->def;
 131                 struct basic_block *source, *target;
 132                 pseudo_t pseudo;
 133                 struct instruction *br;
 134                 int true;
 135 
 136                 if (!def)
 137                         continue;
 138                 source = def->bb;
 139                 pseudo = def->src1;
 140                 if (!pseudo || !source)
 141                         continue;
 142                 br = last_instruction(source->insns);
 143                 if (!br)
 144                         continue;
 145                 if (br->opcode != OP_CBR && br->opcode != OP_BR)
 146                         continue;
 147                 true = pseudo_truth_value(pseudo);
 148                 if (true < 0)
 149                         continue;
 150                 target = true ? second->bb_true : second->bb_false;
 151                 if (bb_depends_on(target, bb))
 152                         continue;
 153                 if (bb_depends_on_phi(target, bb))
 154                         continue;
 155                 changed |= rewrite_branch(source, &br->bb_true, bb, target);
 156                 changed |= rewrite_branch(source, &br->bb_false, bb, target);
 157                 if (changed && !bogus)
 158                         kill_use(THIS_ADDRESS(phi));
 159         } END_FOR_EACH_PTR(phi);
 160         return changed;
 161 }
 162 
 163 static int bb_has_side_effects(struct basic_block *bb)
 164 {
 165         struct instruction *insn;
 166         FOR_EACH_PTR(bb->insns, insn) {
 167                 switch (insn->opcode) {
 168                 case OP_CALL:
 169                         /* FIXME! This should take "const" etc into account */
 170                         return 1;
 171 
 172                 case OP_STORE:
 173                 case OP_CONTEXT:
 174                         return 1;
 175 
 176                 case OP_ASM:
 177                         /* FIXME! This should take "volatile" etc into account */
 178                         return 1;
 179 
 180                 default:
 181                         continue;
 182                 }
 183         } END_FOR_EACH_PTR(insn);
 184         return 0;
 185 }
 186 
 187 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
 188 {
 189         pseudo_t cond = br->cond;
 190         struct instruction *def;
 191 
 192         if (cond->type != PSEUDO_REG)
 193                 return 0;
 194         def = cond->def;
 195         if (def->bb != bb || def->opcode != OP_PHI)
 196                 return 0;
 197         if (bb_has_side_effects(bb))
 198                 return 0;
 199         return try_to_simplify_bb(bb, def, br);
 200 }
 201 
 202 static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
 203         struct basic_block **target_p, int true)
 204 {
 205         struct basic_block *target = *target_p, *final;
 206         struct instruction *insn;
 207         int retval;
 208 
 209         if (target == bb)
 210                 return 0;
 211         insn = last_instruction(target->insns);
 212         if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
 213                 return 0;
 214         /*
 215          * Ahhah! We've found a branch to a branch on the same conditional!
 216          * Now we just need to see if we can rewrite the branch..
 217          */
 218         retval = 0;
 219         final = true ? insn->bb_true : insn->bb_false;
 220         if (bb_has_side_effects(target))
 221                 goto try_to_rewrite_target;
 222         if (bb_depends_on(final, target))
 223                 goto try_to_rewrite_target;
 224         if (bb_depends_on_phi(final, target))
 225                 return 0;
 226         return rewrite_branch(bb, target_p, target, final);
 227 
 228 try_to_rewrite_target:
 229         /*
 230          * If we're the only parent, at least we can rewrite the
 231          * now-known second branch.
 232          */
 233         if (bb_list_size(target->parents) != 1)
 234                 return retval;
 235         insert_branch(target, insn, final);
 236         return 1;
 237 }
 238 
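/*
 * Editor's illustration (not part of the original file):
 * simplify_branch_branch() looks for a branch whose target ends in
 * another conditional branch on the same pseudo:
 *
 *	bb:     OP_CBR %cond -> target, other
 *	target: ...
 *	        OP_CBR %cond -> final_true, final_false
 *
 * On the edge taken for "true", %cond is already known, so - provided
 * 'target' has no side effects and 'final' does not depend on it - the
 * first branch can be rewritten to jump straight at 'final'.
 */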
 239 static int simplify_one_branch(struct basic_block *bb, struct instruction *br)


 252         FOR_EACH_PTR(ep->bbs, bb) {
 253                 struct instruction *br = last_instruction(bb->insns);
 254 
 255                 if (!br || br->opcode != OP_CBR)
 256                         continue;
 257                 changed |= simplify_one_branch(bb, br);
 258         } END_FOR_EACH_PTR(bb);
 259         return changed;
 260 }
 261 
 262 /*
 263  * This is called late - when we have intra-bb liveness information..
 264  */
 265 int simplify_flow(struct entrypoint *ep)
 266 {
 267         return simplify_branch_nodes(ep);
 268 }
 269 
 270 static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
 271 {
 272         concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
 273 }
 274 
 275 void convert_instruction_target(struct instruction *insn, pseudo_t src)
 276 {
 277         pseudo_t target;
 278         struct pseudo_user *pu;
 279         /*
 280          * Go through the "insn->users" list and replace them all..
 281          */
 282         target = insn->target;
 283         if (target == src)
 284                 return;
 285         FOR_EACH_PTR(target->users, pu) {
 286                 if (*pu->userp != VOID) {
 287                         assert(*pu->userp == target);
 288                         *pu->userp = src;
 289                 }
 290         } END_FOR_EACH_PTR(pu);
 291         if (has_use_list(src))
 292                 concat_user_list(target->users, &src->users);
 293         target->users = NULL;
 294 }
 295 
 296 void convert_load_instruction(struct instruction *insn, pseudo_t src)
 297 {
 298         convert_instruction_target(insn, src);
 299         /* Turn the load into a no-op */
 300         insn->opcode = OP_LNOP;
 301         insn->bb = NULL;
 302 }
 303 
 304 static int overlapping_memop(struct instruction *a, struct instruction *b)
 305 {
 306         unsigned int a_start = bytes_to_bits(a->offset);
 307         unsigned int b_start = bytes_to_bits(b->offset);
 308         unsigned int a_size = a->size;
 309         unsigned int b_size = b->size;
 310 
 311         if (a_size + a_start <= b_start)
 312                 return 0;
 313         if (b_size + b_start <= a_start)
 314                 return 0;
 315         return 1;
 316 }
 317 
 318 static inline int same_memop(struct instruction *a, struct instruction *b)
 319 {
 320         return  a->offset == b->offset && a->size == b->size;
 321 }
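/*
 * Editor's note (illustrative only): offsets are compared in bits, so a
 * 32-bit access at byte offset 0 (bits 0..31) and a 32-bit access at
 * byte offset 2 (bits 16..47) make overlapping_memop() return 1, while
 * same_memop() requires an identical offset and size.
 */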


 345                 return 0;
 346         if (dom->src != pseudo) {
 347                 if (local)
 348                         return 0;
 349                 /* We don't think two explicitly different symbols ever alias */
 350                 if (distinct_symbols(insn->src, dom->src))
 351                         return 0;
 352                 /* We could try to do some alias analysis here */
 353                 return -1;
 354         }
 355         if (!same_memop(insn, dom)) {
 356                 if (dom->opcode == OP_LOAD)
 357                         return 0;
 358                 if (!overlapping_memop(insn, dom))
 359                         return 0;
 360                 return -1;
 361         }
 362         return 1;
 363 }
 364 
 365 static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
 366 {
 367         pseudo_t p;
 368         FOR_EACH_PTR(list, p) {
 369                 if (p->def->bb == bb)
 370                         return 1;
 371         } END_FOR_EACH_PTR(p);
 372 
 373         return 0;
 374 }
 375 
 376 static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
 377         struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
 378         int local)
 379 {
 380         struct basic_block *parent;
 381 
 382         if (!bb->parents)
 383                 return !!local;
 384 
 385         FOR_EACH_PTR(bb->parents, parent) {
 386                 struct instruction *one;
 387                 struct instruction *br;
 388                 pseudo_t phi;
 389 
 390                 FOR_EACH_PTR_REVERSE(parent->insns, one) {
 391                         int dominance;
 392                         if (one == insn)
 393                                 goto no_dominance;
 394                         dominance = dominates(pseudo, insn, one, local);
 395                         if (dominance < 0) {
 396                                 if (one->opcode == OP_LOAD)
 397                                         continue;
 398                                 return 0;
 399                         }
 400                         if (!dominance)
 401                                 continue;
 402                         goto found_dominator;
 403                 } END_FOR_EACH_PTR_REVERSE(one);
 404 no_dominance:
 405                 if (parent->generation == generation)
 406                         continue;
 407                 parent->generation = generation;
 408 
 409                 if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
 410                         return 0;
 411                 continue;
 412 
 413 found_dominator:
 414                 if (dominators && phisrc_in_bb(*dominators, parent))
 415                         continue;
 416                 br = delete_last_instruction(&parent->insns);
 417                 phi = alloc_phi(parent, one->target, one->size);
 418                 phi->ident = phi->ident ? : pseudo->ident;
 419                 add_instruction(&parent->insns, br);
 420                 use_pseudo(insn, phi, add_pseudo(dominators, phi));
 421         } END_FOR_EACH_PTR(parent);
 422         return 1;
 423 }               
 424 
 425 /*
 426  * We should probably sort the phi list just to make it easier to compare
 427  * later for equality. 
 428  */
 429 void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
 430 {
 431         pseudo_t new, phi;
 432 
 433         /*
 434          * Check for somewhat common case of duplicate
 435          * phi nodes.
 436          */
 437         new = first_pseudo(dominators)->def->src1;
 438         FOR_EACH_PTR(dominators, phi) {
 439                 if (new != phi->def->src1)
 440                         goto complex_phi;
 441                 new->ident = new->ident ? : phi->ident;
 442         } END_FOR_EACH_PTR(phi);
 443 
 444         /*
 445          * All the same pseudo - mark the phi-nodes unused
 446          * and convert the load into a LNOP and replace the
 447          * pseudo.
 448          */

 449         FOR_EACH_PTR(dominators, phi) {
 450                 kill_instruction(phi->def);
 451         } END_FOR_EACH_PTR(phi);
 452         convert_load_instruction(insn, new);
 453         return;
 454 
 455 complex_phi:
 456         /* We leave symbol pseudos with a bogus usage list here */
 457         if (insn->src->type != PSEUDO_SYM)
 458                 kill_use(&insn->src);
 459         insn->opcode = OP_PHI;
 460         insn->phi_list = dominators;
 461 }
 462 
 463 static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
 464         unsigned long generation, int local)
 465 {
 466         struct basic_block *bb = insn->bb;
 467         struct instruction *one, *dom = NULL;
 468         struct pseudo_list *dominators;
 469         int partial;
 470 
 471         /* Unreachable load? Undo it */
 472         if (!bb) {
 473                 insn->opcode = OP_LNOP;
 474                 return 1;
 475         }
 476 
 477         partial = 0;
 478         FOR_EACH_PTR(bb->insns, one) {
 479                 int dominance;
 480                 if (one == insn)
 481                         goto found;
 482                 dominance = dominates(pseudo, insn, one, local);
 483                 if (dominance < 0) {
 484                         /* Ignore partial load dominators */
 485                         if (one->opcode == OP_LOAD)
 486                                 continue;
 487                         dom = NULL;
 488                         partial = 1;
 489                         continue;
 490                 }
 491                 if (!dominance)
 492                         continue;
 493                 dom = one;
 494                 partial = 0;
 495         } END_FOR_EACH_PTR(one);
 496         /* Whaa? */
 497         warning(pseudo->sym->pos, "unable to find symbol read");
 498         return 0;
 499 found:
 500         if (partial)
 501                 return 0;
 502 
 503         if (dom) {
 504                 convert_load_instruction(insn, dom->target);
 505                 return 1;
 506         }
 507 
 508         /* OK, go find the parents */
 509         bb->generation = generation;
 510 
 511         dominators = NULL;
 512         if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
 513                 return 0;
 514 
 515         /* This happens with initial assignments to structures etc.. */
 516         if (!dominators) {
 517                 if (!local)
 518                         return 0;
 519                 check_access(insn);
 520                 convert_load_instruction(insn, value_pseudo(insn->type, 0));
 521                 return 1;
 522         }
 523 
 524         /*
 525          * If we find just one dominating instruction, we
 526          * can turn it into a direct thing. Otherwise we'll
 527          * have to turn the load into a phi-node of the
 528          * dominators.
 529          */
 530         rewrite_load_instruction(insn, dominators);
 531         return 1;
 532 }
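/*
 * Editor's illustration (not part of the original file): for a load
 * reached by different stores on different paths, e.g.
 *
 *	if (p) x = 1; else x = 2;
 *	return x;			// the load of 'x'
 *
 * no single store dominates, so find_dominating_parents() collects one
 * phi source per path and rewrite_load_instruction() turns the load
 * into a phi-node; with exactly one dominating store the load simply
 * becomes that store's value.
 */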
 533 
 534 static void kill_store(struct instruction *insn)
 535 {
 536         if (insn) {
 537                 insn->bb = NULL;
 538                 insn->opcode = OP_SNOP;
 539                 kill_use(&insn->target);
 540         }
 541 }
 542 
 543 /* Kill a pseudo that is dead on exit from the bb */
 544 static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
 545 {
 546         struct instruction *insn;
 547         struct basic_block *parent;
 548 
 549         if (bb->generation == generation)
 550                 return;
 551         bb->generation = generation;
 552         FOR_EACH_PTR_REVERSE(bb->insns, insn) {
 553                 int opcode = insn->opcode;
 554 
 555                 if (opcode != OP_LOAD && opcode != OP_STORE) {
 556                         if (local)
 557                                 continue;
 558                         if (opcode == OP_CALL)
 559                                 return;
 560                         continue;
 561                 }
 562                 if (insn->src == pseudo) {
 563                         if (opcode == OP_LOAD)
 564                                 return;
 565                         kill_store(insn);
 566                         continue;
 567                 }
 568                 if (local)
 569                         continue;
 570                 if (insn->src->type != PSEUDO_SYM)
 571                         return;
 572         } END_FOR_EACH_PTR_REVERSE(insn);
 573 
 574         FOR_EACH_PTR(bb->parents, parent) {
 575                 struct basic_block *child;
 576                 FOR_EACH_PTR(parent->children, child) {
 577                         if (child && child != bb)
 578                                 return;
 579                 } END_FOR_EACH_PTR(child);
 580                 kill_dead_stores(pseudo, generation, parent, local);
 581         } END_FOR_EACH_PTR(parent);
 582 }
 583 
 584 /*
 585  * This should see if the "insn" trivially dominates some previous store, and kill the
 586  * store if unnecessary.
 587  */
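/*
 * Editor's illustration (not part of the original file):
 *
 *	x = 1;		// earlier store, killed as dead
 *	x = 2;		// 'insn'
 *
 * the later store to the same location makes the earlier one
 * unnecessary, as long as no load or possibly-aliasing access sits in
 * between (in which case the backward walk stops).
 */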
 588 static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn, 
 589         unsigned long generation, struct basic_block *bb, int local, int found)
 590 {
 591         struct instruction *one;
 592         struct basic_block *parent;
 593 
 594         /* Unreachable store? Undo it */
 595         if (!bb) {
 596                 kill_store(insn);
 597                 return;
 598         }
 599         if (bb->generation == generation)
 600                 return;
 601         bb->generation = generation;
 602         FOR_EACH_PTR_REVERSE(bb->insns, one) {
 603                 int dominance;
 604                 if (!found) {
 605                         if (one != insn)
 606                                 continue;
 607                         found = 1;
 608                         continue;
 609                 }
 610                 dominance = dominates(pseudo, insn, one, local);
 611                 if (!dominance)
 612                         continue;
 613                 if (dominance < 0)
 614                         return;
 615                 if (one->opcode == OP_LOAD)
 616                         return;
 617                 kill_store(one);
 618         } END_FOR_EACH_PTR_REVERSE(one);
 619 
 620         if (!found) {
 621                 warning(bb->pos, "Unable to find instruction");
 622                 return;
 623         }
 624 
 625         FOR_EACH_PTR(bb->parents, parent) {
 626                 struct basic_block *child;
 627                 FOR_EACH_PTR(parent->children, child) {
 628                         if (child && child != bb)
 629                                 return;
 630                 } END_FOR_EACH_PTR(child);
 631                 kill_dominated_stores(pseudo, insn, generation, parent, local, found);
 632         } END_FOR_EACH_PTR(parent);
 633 }
 634 
 635 void check_access(struct instruction *insn)
 636 {
 637         pseudo_t pseudo = insn->src;
 638 
 639         if (insn->bb && pseudo->type == PSEUDO_SYM) {
 640                 int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
 641                 struct symbol *sym = pseudo->sym;
 642 
 643                 if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
 644                         warning(insn->pos, "invalid access %s '%s' (%d %d)",
 645                                 offset < 0 ? "below" : "past the end of",
 646                                 show_ident(sym->ident), offset,
 647                                 bits_to_bytes(sym->bit_size));

 648         }

 649 }
 650 
 651 static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
 652 {
 653         pseudo_t pseudo;
 654         struct pseudo_user *pu;
 655         unsigned long mod;
 656         int all;
 657 
 658         /* Never used as a symbol? */
 659         pseudo = sym->pseudo;
 660         if (!pseudo)
 661                 return;
 662 
 663         /* We don't do coverage analysis of volatiles.. */
 664         if (sym->ctype.modifiers & MOD_VOLATILE)
 665                 return;
 666 
 667         /* ..and symbols with external visibility need more care */
 668         mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
 669         if (mod)
 670                 goto external_visibility;
 671 
 672         FOR_EACH_PTR(pseudo->users, pu) {
 673                 /* We know that the symbol-pseudo use is the "src" in the instruction */
 674                 struct instruction *insn = pu->insn;
 675 
 676                 switch (insn->opcode) {
 677                 case OP_STORE:
 678                         break;
 679                 case OP_LOAD:
 680                         break;
 681                 case OP_SYMADDR:
 682                         if (!insn->bb)
 683                                 continue;
 684                         mod |= MOD_ADDRESSABLE;
 685                         goto external_visibility;
 686                 case OP_NOP:
 687                 case OP_SNOP:
 688                 case OP_LNOP:
 689                 case OP_PHI:
 690                         continue;
 691                 default:
 692                         warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
 693                 }
 694         } END_FOR_EACH_PTR(pu);
 695 
 696 external_visibility:
 697         all = 1;
 698         FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
 699                 struct instruction *insn = pu->insn;
 700                 if (insn->opcode == OP_LOAD)
 701                         all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
 702         } END_FOR_EACH_PTR_REVERSE(pu);
 703 
 704         /* If we converted all the loads, remove the stores. They are dead */
 705         if (all && !mod) {
 706                 FOR_EACH_PTR(pseudo->users, pu) {
 707                         struct instruction *insn = pu->insn;
 708                         if (insn->opcode == OP_STORE)
 709                                 kill_store(insn);
 710                 } END_FOR_EACH_PTR(pu);
 711         } else {
 712                 /*
 713                  * If we couldn't take the shortcut, see if we can at least kill some
 714                  * of them..
 715                  */
 716                 FOR_EACH_PTR(pseudo->users, pu) {
 717                         struct instruction *insn = pu->insn;
 718                         if (insn->opcode == OP_STORE)
 719                                 kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
 720                 } END_FOR_EACH_PTR(pu);
 721 
 722                 if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
 723                         struct basic_block *bb;
 724                         FOR_EACH_PTR(ep->bbs, bb) {
 725                                 if (!bb->children)
 726                                         kill_dead_stores(pseudo, ++bb_generation, bb, !mod);

 727                         } END_FOR_EACH_PTR(bb);
 728                 }
 729         }
 730                         
 731         return;
 732 }
 733 
 734 void simplify_symbol_usage(struct entrypoint *ep)
 735 {
 736         pseudo_t pseudo;
 737 
 738         FOR_EACH_PTR(ep->accesses, pseudo) {
 739                 simplify_one_symbol(ep, pseudo->sym);
 740         } END_FOR_EACH_PTR(pseudo);
 741 }
 742 
 743 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
 744 {
 745         struct basic_block *child;
 746 
 747         if (bb->generation == generation)
 748                 return;
 749         bb->generation = generation;
 750         FOR_EACH_PTR(bb->children, child) {
 751                 mark_bb_reachable(child, generation);
 752         } END_FOR_EACH_PTR(child);
 753 }
 754 
 755 static void kill_defs(struct instruction *insn)
 756 {
 757         pseudo_t target = insn->target;
 758 
 759         if (!has_use_list(target))
 760                 return;
 761         if (target->def != insn)
 762                 return;
 763 
 764         convert_instruction_target(insn, VOID);
 765 }
 766 
 767 void kill_bb(struct basic_block *bb)
 768 {
 769         struct instruction *insn;
 770         struct basic_block *child, *parent;
 771 
 772         FOR_EACH_PTR(bb->insns, insn) {
 773                 kill_instruction_force(insn);
 774                 kill_defs(insn);
 775                 /*
 776                  * We kill unreachable instructions even if they
 777                  * otherwise aren't "killable" (e.g. volatile loads)
 778                  */
 779         } END_FOR_EACH_PTR(insn);
 780         bb->insns = NULL;
 781 
 782         FOR_EACH_PTR(bb->children, child) {
 783                 remove_bb_from_list(&child->parents, bb, 0);
 784         } END_FOR_EACH_PTR(child);
 785         bb->children = NULL;
 786 
 787         FOR_EACH_PTR(bb->parents, parent) {
 788                 remove_bb_from_list(&parent->children, bb, 0);
 789         } END_FOR_EACH_PTR(parent);
 790         bb->parents = NULL;
 791 }
 792 


 827                 changed |= rewrite_branch(bb, &insn->bb_true, old, new);
 828                 assert(changed);
 829                 return changed;
 830         case OP_SWITCH: {
 831                 struct multijmp *jmp;
 832                 FOR_EACH_PTR(insn->multijmp_list, jmp) {
 833                         changed |= rewrite_branch(bb, &jmp->target, old, new);
 834                 } END_FOR_EACH_PTR(jmp);
 835                 assert(changed);
 836                 return changed;
 837         }
 838         default:
 839                 return 0;
 840         }
 841 }
 842 
 843 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
 844 {
 845         struct basic_block *parent;
 846         struct basic_block *target = br->bb_true;
 847         struct basic_block *false = br->bb_false;
 848 
 849         if (br->opcode == OP_CBR) {
 850                 pseudo_t cond = br->cond;
 851                 if (cond->type != PSEUDO_VAL)
 852                         return NULL;
 853                 target = cond->value ? target : false;
 854         }
 855 
 856         /*
 857          * We can't do FOR_EACH_PTR() here, because the parent list
 858          * may change when we rewrite the parent.
 859          */
 860         while ((parent = first_basic_block(bb->parents)) != NULL) {
 861                 if (!rewrite_parent_branch(parent, bb, target))
 862                         return NULL;
 863         }
 864         return target;
 865 }
 866 
 867 static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
 868 {
 869         if (bb) {
 870                 struct basic_block *tmp;
 871                 int no_bb_in_list = 0;
 872 
 873                 FOR_EACH_PTR(list, tmp) {


 939 
 940 void pack_basic_blocks(struct entrypoint *ep)
 941 {
 942         struct basic_block *bb;
 943 
 944         /* See if we can merge a bb into another one.. */
 945         FOR_EACH_PTR(ep->bbs, bb) {
 946                 struct instruction *first, *insn;
 947                 struct basic_block *parent, *child, *last;
 948 
 949                 if (!bb_reachable(bb))
 950                         continue;
 951 
 952                 /*
 953                  * Just a branch?
 954                  */
 955                 FOR_EACH_PTR(bb->insns, first) {
 956                         if (!first->bb)
 957                                 continue;
 958                         switch (first->opcode) {
 959                         case OP_NOP: case OP_LNOP: case OP_SNOP:

 960                                 continue;
 961                         case OP_CBR:
 962                         case OP_BR: {
 963                                 struct basic_block *replace;
 964                                 replace = rewrite_branch_bb(bb, first);
 965                                 if (replace) {
 966                                         kill_bb(bb);
 967                                         goto no_merge;
 968                                 }
 969                         }
 970                         /* fallthrough */
 971                         default:
 972                                 goto out;
 973                         }
 974                 } END_FOR_EACH_PTR(first);
 975 
 976 out:
 977                 /*
 978                  * See if we only have one parent..
 979                  */


 985                                 continue;
 986                         }
 987                         last = parent;
 988                 } END_FOR_EACH_PTR(parent);
 989 
 990                 parent = last;
 991                 if (!parent || parent == bb)
 992                         continue;
 993 
 994                 /*
 995                  * Goodie. See if the parent can merge..
 996                  */
 997                 FOR_EACH_PTR(parent->children, child) {
 998                         if (child != bb)
 999                                 goto no_merge;
1000                 } END_FOR_EACH_PTR(child);
1001 
1002                 /*
1003                  * Merge the two.
1004                  */
1005                 repeat_phase |= REPEAT_CSE;
1006 
1007                 parent->children = bb->children;
1008                 bb->children = NULL;
1009                 bb->parents = NULL;
1010 
1011                 FOR_EACH_PTR(parent->children, child) {
1012                         replace_bb_in_list(&child->parents, bb, parent, 0);
1013                 } END_FOR_EACH_PTR(child);
1014 
1015                 kill_instruction(delete_last_instruction(&parent->insns));
1016                 FOR_EACH_PTR(bb->insns, insn) {
1017                         if (insn->bb) {

1018                                 assert(insn->bb == bb);
1019                                 insn->bb = parent;
1020                         }
1021                         add_instruction(&parent->insns, insn);
1022                 } END_FOR_EACH_PTR(insn);
1023                 bb->insns = NULL;
1024 
1025         no_merge:
1026                 /* nothing to do */;
1027         } END_FOR_EACH_PTR(bb);
1028 }
1029 
1030 


 114  * may be constant, and not actually need this block at all.
 115  */
 116 static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
 117 {
 118         int changed = 0;
 119         pseudo_t phi;
 120         int bogus;
 121 
 122         /*
 123          * This is due to improper dominance tracking during
 124          * simplify_symbol_usage()/conversion to SSA form.
 125          * No sane simplification can be done when we have this.
 126          */
 127         bogus = bb_list_size(bb->parents) != pseudo_list_size(first->phi_list);
 128 
 129         FOR_EACH_PTR(first->phi_list, phi) {
 130                 struct instruction *def = phi->def;
 131                 struct basic_block *source, *target;
 132                 pseudo_t pseudo;
 133                 struct instruction *br;
 134                 int cond;
 135 
 136                 if (!def)
 137                         continue;
 138                 source = def->bb;
 139                 pseudo = def->src1;
 140                 if (!pseudo || !source)
 141                         continue;
 142                 br = last_instruction(source->insns);
 143                 if (!br)
 144                         continue;
 145                 if (br->opcode != OP_CBR && br->opcode != OP_BR)
 146                         continue;
 147                 cond = pseudo_truth_value(pseudo);
 148                 if (cond < 0)
 149                         continue;
 150                 target = cond ? second->bb_true : second->bb_false;
 151                 if (bb_depends_on(target, bb))
 152                         continue;
 153                 if (bb_depends_on_phi(target, bb))
 154                         continue;
 155                 changed |= rewrite_branch(source, &br->bb_true, bb, target);
 156                 changed |= rewrite_branch(source, &br->bb_false, bb, target);
 157                 if (changed && !bogus)
 158                         kill_use(THIS_ADDRESS(phi));
 159         } END_FOR_EACH_PTR(phi);
 160         return changed;
 161 }
 162 
 163 static int bb_has_side_effects(struct basic_block *bb)
 164 {
 165         struct instruction *insn;
 166         FOR_EACH_PTR(bb->insns, insn) {
 167                 if (!insn->bb)
 168                         continue;
 169                 switch (insn->opcode) {
 170                 case OP_CALL:
 171                         /* FIXME! This should take "const" etc into account */
 172                         return 1;
 173 
 174                 case OP_LOAD:
 175                         if (!insn->type)
 176                                 return 1;
 177                         if (insn->is_volatile)
 178                                 return 1;
 179                         continue;
 180 
 181                 case OP_STORE:
 182                 case OP_CONTEXT:
 183                         return 1;
 184 
 185                 case OP_ASM:
 186                         /* FIXME! This should take "volatile" etc into account */
 187                         return 1;
 188 
 189                 default:
 190                         continue;
 191                 }
 192         } END_FOR_EACH_PTR(insn);
 193         return 0;
 194 }
 195 
 196 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
 197 {
 198         pseudo_t cond = br->cond;
 199         struct instruction *def;
 200 
 201         if (cond->type != PSEUDO_REG)
 202                 return 0;
 203         def = cond->def;
 204         if (def->bb != bb || def->opcode != OP_PHI)
 205                 return 0;
 206         if (bb_has_side_effects(bb))
 207                 return 0;
 208         return try_to_simplify_bb(bb, def, br);
 209 }
 210 
 211 static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
 212         struct basic_block **target_p, int bb_true)
 213 {
 214         struct basic_block *target = *target_p, *final;
 215         struct instruction *insn;
 216         int retval;
 217 
 218         if (target == bb)
 219                 return 0;
 220         insn = last_instruction(target->insns);
 221         if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
 222                 return 0;
 223         /*
 224          * Ahhah! We've found a branch to a branch on the same conditional!
 225          * Now we just need to see if we can rewrite the branch..
 226          */
 227         retval = 0;
 228         final = bb_true ? insn->bb_true : insn->bb_false;
 229         if (bb_has_side_effects(target))
 230                 goto try_to_rewrite_target;
 231         if (bb_depends_on(final, target))
 232                 goto try_to_rewrite_target;
 233         if (bb_depends_on_phi(final, target))
 234                 return 0;
 235         return rewrite_branch(bb, target_p, target, final);
 236 
 237 try_to_rewrite_target:
 238         /*
 239          * If we're the only parent, at least we can rewrite the
 240          * now-known second branch.
 241          */
 242         if (bb_list_size(target->parents) != 1)
 243                 return retval;
 244         insert_branch(target, insn, final);
 245         return 1;
 246 }
 247 
 248 static int simplify_one_branch(struct basic_block *bb, struct instruction *br)


 261         FOR_EACH_PTR(ep->bbs, bb) {
 262                 struct instruction *br = last_instruction(bb->insns);
 263 
 264                 if (!br || br->opcode != OP_CBR)
 265                         continue;
 266                 changed |= simplify_one_branch(bb, br);
 267         } END_FOR_EACH_PTR(bb);
 268         return changed;
 269 }
 270 
 271 /*
 272  * This is called late - when we have intra-bb liveness information..
 273  */
 274 int simplify_flow(struct entrypoint *ep)
 275 {
 276         return simplify_branch_nodes(ep);
 277 }
 278 
 279 static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
 280 {
 281         copy_ptr_list((struct ptr_list **)dst, (struct ptr_list *)src);
 282 }
 283 
 284 void convert_instruction_target(struct instruction *insn, pseudo_t src)
 285 {
 286         pseudo_t target;
 287         struct pseudo_user *pu;
 288         /*
 289          * Go through the "insn->users" list and replace them all..
 290          */
 291         target = insn->target;
 292         if (target == src)
 293                 return;
 294         FOR_EACH_PTR(target->users, pu) {
 295                 if (*pu->userp != VOID) {
 296                         assert(*pu->userp == target);
 297                         *pu->userp = src;
 298                 }
 299         } END_FOR_EACH_PTR(pu);
 300         if (has_use_list(src))
 301                 concat_user_list(target->users, &src->users);
 302         target->users = NULL;
 303 }
 304 
 305 void convert_load_instruction(struct instruction *insn, pseudo_t src)
 306 {
 307         convert_instruction_target(insn, src);
 308         kill_instruction(insn);
 309         repeat_phase |= REPEAT_SYMBOL_CLEANUP;

 310 }
 311 
 312 static int overlapping_memop(struct instruction *a, struct instruction *b)
 313 {
 314         unsigned int a_start = bytes_to_bits(a->offset);
 315         unsigned int b_start = bytes_to_bits(b->offset);
 316         unsigned int a_size = a->size;
 317         unsigned int b_size = b->size;
 318 
 319         if (a_size + a_start <= b_start)
 320                 return 0;
 321         if (b_size + b_start <= a_start)
 322                 return 0;
 323         return 1;
 324 }
 325 
 326 static inline int same_memop(struct instruction *a, struct instruction *b)
 327 {
 328         return  a->offset == b->offset && a->size == b->size;
 329 }


 353                 return 0;
 354         if (dom->src != pseudo) {
 355                 if (local)
 356                         return 0;
 357                 /* We don't think two explicitly different symbols ever alias */
 358                 if (distinct_symbols(insn->src, dom->src))
 359                         return 0;
 360                 /* We could try to do some alias analysis here */
 361                 return -1;
 362         }
 363         if (!same_memop(insn, dom)) {
 364                 if (dom->opcode == OP_LOAD)
 365                         return 0;
 366                 if (!overlapping_memop(insn, dom))
 367                         return 0;
 368                 return -1;
 369         }
 370         return 1;
 371 }
 372 
 373 /*
 374  * We should probably sort the phi list just to make it easier to compare
 375  * later for equality. 
 376  */
 377 void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
 378 {
 379         pseudo_t new, phi;
 380 
 381         /*
 382          * Check for somewhat common case of duplicate
 383          * phi nodes.
 384          */
 385         new = first_pseudo(dominators)->def->phi_src;
 386         FOR_EACH_PTR(dominators, phi) {
 387                 if (new != phi->def->phi_src)
 388                         goto complex_phi;
 389                 new->ident = new->ident ? : phi->ident;
 390         } END_FOR_EACH_PTR(phi);
 391 
 392         /*
 393          * All the same pseudo - mark the phi-nodes unused
 394          * and convert the load into a LNOP and replace the
 395          * pseudo.
 396          */
 397         convert_load_instruction(insn, new);
 398         FOR_EACH_PTR(dominators, phi) {
 399                 kill_instruction(phi->def);
 400         } END_FOR_EACH_PTR(phi);
 401         goto end;

 402 
 403 complex_phi:
 404         /* We leave symbol pseudos with a bogus usage list here */
 405         if (insn->src->type != PSEUDO_SYM)
 406                 kill_use(&insn->src);
 407         insn->opcode = OP_PHI;
 408         insn->phi_list = dominators;

 409 
 410 end:
 411         repeat_phase |= REPEAT_SYMBOL_CLEANUP;
 412 }
 413 
 414 /* Kill a pseudo that is dead on exit from the bb */
 415 // The context is:
 416 // * the variable is not global but may have its address used (local/non-local)
 417 // * the stores are only needed by other functions which would do some
 418 //   loads via the escaped address
 419 // We start at the terminating BBs (normal exit BB + no-return/unreachable)
 420 // We walk up the BB's instructions backward
 421 // * we're only concerned by loads, stores & calls
 422 // * if we reach a call                 -> we have to stop if var is non-local
 423 // * if we reach a load of our var      -> we have to stop
 424 // * if we reach a store of our var     -> we can kill it, it's dead
 425 // * we can ignore other stores & loads if the var is local
 426 // * if we reach another store or load done via non-symbol access
 427 //   (so done via some address calculation) -> we have to stop
 428 // If we reach the top of the BB we can recurse into the parent BBs.
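// Editor's illustration (not part of the original file):
//
//	void f(void)
//	{
//		int tmp;
//		escape(&tmp);	// hypothetical call taking the address
//		tmp = 1;	// never loaded again before the exit BB
//	}
//
// The final store to 'tmp' is dead on every exit path and is killed by
// the backward walk below; had another call followed it, the walk would
// stop there, since the callee could load through the escaped address.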
 429 static void kill_dead_stores_bb(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
 430 {
 431         struct instruction *insn;
 432         struct basic_block *parent;
 433 
 434         if (bb->generation == generation)
 435                 return;
 436         bb->generation = generation;
 437         FOR_EACH_PTR_REVERSE(bb->insns, insn) {
 438                 if (!insn->bb)
 439                         continue;
 440                 switch (insn->opcode) {
 441                 case OP_LOAD:
 442                         if (insn->src == pseudo)
 443                                 return;
 444                         break;
 445                 case OP_STORE:
 446                         if (insn->src == pseudo) {
 447                                 kill_instruction_force(insn);
 448                                 continue;
 449                         }
 450                         break;
 451                 case OP_CALL:
 452                         if (!local)
 453                                 return;
 454                 default:
 455                         continue;
 456                 }
 457                 if (!local && insn->src->type != PSEUDO_SYM)
 458                         return;
 459         } END_FOR_EACH_PTR_REVERSE(insn);
 460 
 461         FOR_EACH_PTR(bb->parents, parent) {
 462                 if (bb_list_size(parent->children) > 1)
 463                         continue;
 464                 kill_dead_stores_bb(pseudo, generation, parent, local);
 465         } END_FOR_EACH_PTR(parent);
 466 }
 467 
 468 void check_access(struct instruction *insn)
 469 {
 470         pseudo_t pseudo = insn->src;
 471 
 472         if (insn->bb && pseudo->type == PSEUDO_SYM) {
 473                 int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
 474                 struct symbol *sym = pseudo->sym;
 475 
 476                 if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size)) {
 477                         if (insn->tainted)
 478                                 return;
 479                         warning(insn->pos, "invalid access %s '%s' (%d %d)",
 480                                 offset < 0 ? "below" : "past the end of",
 481                                 show_ident(sym->ident), offset,
 482                                 bits_to_bytes(sym->bit_size));
 483                         insn->tainted = 1;
 484                 }
 485         }
 486 }
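/*
 * Editor's note (illustrative only): for
 *
 *	int a[2];		// sym->bit_size == 64
 *	... = a[2];		// 32-bit load at byte offset 8
 *
 * bit == 64 + 32 exceeds sym->bit_size, so the "past the end of"
 * warning fires; insn->tainted then suppresses duplicate warnings for
 * the same instruction.
 */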
 487 
 488 static struct pseudo_user *first_user(pseudo_t p)
 489 {

 490         struct pseudo_user *pu;
 491         FOR_EACH_PTR(p->users, pu) {
 492                 if (!pu)
 493                         continue;
 494                 return pu;
 495         } END_FOR_EACH_PTR(pu);
 496         return NULL;
 497 }
 498 
 499 void kill_dead_stores(struct entrypoint *ep, pseudo_t addr, int local)
 500 {
 501         unsigned long generation;
 502         struct basic_block *bb;
 503 
 504         switch (pseudo_user_list_size(addr->users)) {
 505         case 0:
 506                 return;
 507         case 1:
 508                 if (local) {
 509                         struct pseudo_user *pu = first_user(addr);
 510                         struct instruction *insn = pu->insn;
 511                         if (insn->opcode == OP_STORE) {
 512                                 kill_instruction_force(insn);
 513                                 return;
 514                         }
 515                 }
 516         default:
 517                 break;
 518         }

 519 
 520         generation = ++bb_generation;
 521         FOR_EACH_PTR(ep->bbs, bb) {
 522                 if (bb->children)
 523                         continue;
 524                 kill_dead_stores_bb(addr, generation, bb, local);
 525         } END_FOR_EACH_PTR(bb);
 526 }
 527 
 528 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
 529 {
 530         struct basic_block *child;
 531 
 532         if (bb->generation == generation)
 533                 return;
 534         bb->generation = generation;
 535         FOR_EACH_PTR(bb->children, child) {
 536                 mark_bb_reachable(child, generation);
 537         } END_FOR_EACH_PTR(child);
 538 }
 539 
 540 static void kill_defs(struct instruction *insn)
 541 {
 542         pseudo_t target = insn->target;
 543 
 544         if (!has_use_list(target))
 545                 return;
 546         if (target->def != insn)
 547                 return;
 548 
 549         convert_instruction_target(insn, VOID);
 550 }
 551 
 552 void kill_bb(struct basic_block *bb)
 553 {
 554         struct instruction *insn;
 555         struct basic_block *child, *parent;
 556 
 557         FOR_EACH_PTR(bb->insns, insn) {
 558                 if (!insn->bb)
 559                         continue;
 560                 kill_instruction_force(insn);
 561                 kill_defs(insn);
 562                 /*
 563                  * We kill unreachable instructions even if they
 564                  * otherwise aren't "killable" (e.g. volatile loads)
 565                  */
 566         } END_FOR_EACH_PTR(insn);
 567         bb->insns = NULL;
 568 
 569         FOR_EACH_PTR(bb->children, child) {
 570                 remove_bb_from_list(&child->parents, bb, 0);
 571         } END_FOR_EACH_PTR(child);
 572         bb->children = NULL;
 573 
 574         FOR_EACH_PTR(bb->parents, parent) {
 575                 remove_bb_from_list(&parent->children, bb, 0);
 576         } END_FOR_EACH_PTR(parent);
 577         bb->parents = NULL;
 578 }
 579 


 614                 changed |= rewrite_branch(bb, &insn->bb_true, old, new);
 615                 assert(changed);
 616                 return changed;
 617         case OP_SWITCH: {
 618                 struct multijmp *jmp;
 619                 FOR_EACH_PTR(insn->multijmp_list, jmp) {
 620                         changed |= rewrite_branch(bb, &jmp->target, old, new);
 621                 } END_FOR_EACH_PTR(jmp);
 622                 assert(changed);
 623                 return changed;
 624         }
 625         default:
 626                 return 0;
 627         }
 628 }
 629 
 630 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
 631 {
 632         struct basic_block *parent;
 633         struct basic_block *target = br->bb_true;

 634 
 635         if (br->opcode == OP_CBR) {
 636                 pseudo_t cond = br->cond;
 637                 if (cond->type != PSEUDO_VAL)
 638                         return NULL;
 639                 target = cond->value ? target : br->bb_false;
 640         }
 641 
 642         /*
 643          * We can't do FOR_EACH_PTR() here, because the parent list
 644          * may change when we rewrite the parent.
 645          */
 646         while ((parent = first_basic_block(bb->parents)) != NULL) {
 647                 if (!rewrite_parent_branch(parent, bb, target))
 648                         return NULL;
 649         }
 650         return target;
 651 }
 652 
 653 static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
 654 {
 655         if (bb) {
 656                 struct basic_block *tmp;
 657                 int no_bb_in_list = 0;
 658 
 659                 FOR_EACH_PTR(list, tmp) {


 725 
 726 void pack_basic_blocks(struct entrypoint *ep)
 727 {
 728         struct basic_block *bb;
 729 
 730         /* See if we can merge a bb into another one.. */
 731         FOR_EACH_PTR(ep->bbs, bb) {
 732                 struct instruction *first, *insn;
 733                 struct basic_block *parent, *child, *last;
 734 
 735                 if (!bb_reachable(bb))
 736                         continue;
 737 
 738                 /*
 739                  * Just a branch?
 740                  */
 741                 FOR_EACH_PTR(bb->insns, first) {
 742                         if (!first->bb)
 743                                 continue;
 744                         switch (first->opcode) {
 745                         case OP_NOP:
 746                         case OP_INLINED_CALL:
 747                                 continue;
 748                         case OP_CBR:
 749                         case OP_BR: {
 750                                 struct basic_block *replace;
 751                                 replace = rewrite_branch_bb(bb, first);
 752                                 if (replace) {
 753                                         kill_bb(bb);
 754                                         goto no_merge;
 755                                 }
 756                         }
 757                         /* fallthrough */
 758                         default:
 759                                 goto out;
 760                         }
 761                 } END_FOR_EACH_PTR(first);
 762 
 763 out:
 764                 /*
 765                  * See if we only have one parent..
 766                  */


 772                                 continue;
 773                         }
 774                         last = parent;
 775                 } END_FOR_EACH_PTR(parent);
 776 
 777                 parent = last;
 778                 if (!parent || parent == bb)
 779                         continue;
 780 
 781                 /*
 782                  * Goodie. See if the parent can merge..
 783                  */
 784                 FOR_EACH_PTR(parent->children, child) {
 785                         if (child != bb)
 786                                 goto no_merge;
 787                 } END_FOR_EACH_PTR(child);
 788 
 789                 /*
 790                  * Merge the two.
 791                  */
 792                 repeat_phase |= REPEAT_CFG_CLEANUP;
 793 
 794                 parent->children = bb->children;
 795                 bb->children = NULL;
 796                 bb->parents = NULL;
 797 
 798                 FOR_EACH_PTR(parent->children, child) {
 799                         replace_bb_in_list(&child->parents, bb, parent, 0);
 800                 } END_FOR_EACH_PTR(child);
 801 
 802                 kill_instruction(delete_last_instruction(&parent->insns));
 803                 FOR_EACH_PTR(bb->insns, insn) {
 804                         if (!insn->bb)
 805                                 continue;
 806                         assert(insn->bb == bb);
 807                         insn->bb = parent;

 808                         add_instruction(&parent->insns, insn);
 809                 } END_FOR_EACH_PTR(insn);
 810                 bb->insns = NULL;
 811 
 812         no_merge:
 813                 /* nothing to do */;
 814         } END_FOR_EACH_PTR(bb);
 815 }
 816 
 817