/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */

#include <string.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <stddef.h>
#include <assert.h>

#include "parse.h"
#include "expression.h"
#include "linearize.h"
#include "flow.h"
#include "target.h"

unsigned long bb_generation;

/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
static int rewrite_branch(struct basic_block *bb,
        struct basic_block **ptr,
        struct basic_block *old,
        struct basic_block *new)
{
        if (*ptr != old || new == old || !bb->ep)
                return 0;

        /* We might find new if-conversions or non-dominating CSEs */
        /* we may also create new dead cycles */
        repeat_phase |= REPEAT_CSE | REPEAT_CFG_CLEANUP;
        *ptr = new;
        replace_bb_in_list(&bb->children, old, new, 1);
        remove_bb_from_list(&old->parents, bb, 1);
        add_bb(&new->parents, bb);
        return 1;
}

/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
        switch (pseudo->type) {
        case PSEUDO_VAL:
                return !!pseudo->value;

        case PSEUDO_REG: {
                struct instruction *insn = pseudo->def;

                /* A symbol address is always considered true.. */
                if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
                        return 1;
        }
                /* Fall through */
        default:
                return -1;
        }
}
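
/*
 * Rough illustration of the two "known" cases above, using made-up code:
 *
 *     if (1) ...        // PSEUDO_VAL: truth value is !!value
 *     if (&sym) ...     // OP_SYMADDR result: a symbol address, taken as true
 *
 * Everything else (arguments, loads, arithmetic results, ...) comes back
 * as -1, "don't know", and the callers leave the branch alone.
 */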

/*
 * Does a basic block depend on the pseudos that "src" defines?
 */
static int bb_depends_on(struct basic_block *target, struct basic_block *src)
{
        pseudo_t pseudo;

        FOR_EACH_PTR(src->defines, pseudo) {
                if (pseudo_in_list(target->needs, pseudo))
                        return 1;
        } END_FOR_EACH_PTR(pseudo);
        return 0;
}

/*
 * This really should be handled by bb_depends_on(),
 * which efficiently checks the dependence using the
 * defines - needs liveness info. The problem is that
 * no liveness is computed for OP_PHI & OP_PHISRC.
 *
 * This function adds the missing dependency checks.
 */
static int bb_depends_on_phi(struct basic_block *target, struct basic_block *src)
{
        struct instruction *insn;
        FOR_EACH_PTR(src->insns, insn) {
                if (!insn->bb)
                        continue;
                if (insn->opcode != OP_PHI)
                        continue;
                if (pseudo_in_list(target->needs, insn->target))
                        return 1;
        } END_FOR_EACH_PTR(insn);
        return 0;
}

/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
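/*
 * A rough sketch of the shape this targets (block and pseudo names are
 * invented, syntax approximate):
 *
 *     L1: ... phisrc %phi1 <- 0;  br L3
 *     L2: ... phisrc %phi2 <- 1;  br L3
 *     L3: %r <- phi(%phi1, %phi2);  cbr %r, Ltrue, Lfalse
 *
 * The phi sources are known constants, so L1 can branch straight to
 * Lfalse and L2 straight to Ltrue, bypassing L3 - as long as the chosen
 * target does not depend on pseudos that L3 defines.
 */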
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
        int changed = 0;
        pseudo_t phi;
        int bogus;

        /*
         * This is due to improper dominance tracking during
         * simplify_symbol_usage()/conversion to SSA form.
         * No sane simplification can be done when we have this.
         */
        bogus = bb_list_size(bb->parents) != pseudo_list_size(first->phi_list);

        FOR_EACH_PTR(first->phi_list, phi) {
                struct instruction *def = phi->def;
                struct basic_block *source, *target;
                pseudo_t pseudo;
                struct instruction *br;
                int true;

                if (!def)
                        continue;
                source = def->bb;
                pseudo = def->src1;
                if (!pseudo || !source)
                        continue;
                br = last_instruction(source->insns);
                if (!br)
                        continue;
                if (br->opcode != OP_CBR && br->opcode != OP_BR)
                        continue;
                true = pseudo_truth_value(pseudo);
                if (true < 0)
                        continue;
                target = true ? second->bb_true : second->bb_false;
                if (bb_depends_on(target, bb))
                        continue;
                if (bb_depends_on_phi(target, bb))
                        continue;
                changed |= rewrite_branch(source, &br->bb_true, bb, target);
                changed |= rewrite_branch(source, &br->bb_false, bb, target);
                if (changed && !bogus)
                        kill_use(THIS_ADDRESS(phi));
        } END_FOR_EACH_PTR(phi);
        return changed;
}

static int bb_has_side_effects(struct basic_block *bb)
{
        struct instruction *insn;
        FOR_EACH_PTR(bb->insns, insn) {
                switch (insn->opcode) {
                case OP_CALL:
                        /* FIXME! This should take "const" etc into account */
                        return 1;

                case OP_STORE:
                case OP_CONTEXT:
                        return 1;

                case OP_ASM:
                        /* FIXME! This should take "volatile" etc into account */
                        return 1;

                default:
                        continue;
                }
        } END_FOR_EACH_PTR(insn);
        return 0;
}

static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
{
        pseudo_t cond = br->cond;
        struct instruction *def;

        if (cond->type != PSEUDO_REG)
                return 0;
        def = cond->def;
        if (def->bb != bb || def->opcode != OP_PHI)
                return 0;
        if (bb_has_side_effects(bb))
                return 0;
        return try_to_simplify_bb(bb, def, br);
}

static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
        struct basic_block **target_p, int true)
{
        struct basic_block *target = *target_p, *final;
        struct instruction *insn;
        int retval;

        if (target == bb)
                return 0;
        insn = last_instruction(target->insns);
        if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
                return 0;
        /*
         * Ahhah! We've found a branch to a branch on the same conditional!
         * Now we just need to see if we can rewrite the branch..
         */
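        /*
         * Roughly (with invented labels): bb ends in "cbr %c, T, E" and T
         * itself ends in "cbr %c, T2, E2".  Reaching T through the bb_true
         * edge means %c was true, so that edge can point at T2 directly;
         * the bb_false edge can likewise be retargeted at E2 on the other
         * call, subject to the dependency checks below.
         */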
        retval = 0;
        final = true ? insn->bb_true : insn->bb_false;
        if (bb_has_side_effects(target))
                goto try_to_rewrite_target;
        if (bb_depends_on(final, target))
                goto try_to_rewrite_target;
        if (bb_depends_on_phi(final, target))
                return 0;
        return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
        /*
         * If we're the only parent, at least we can rewrite the
         * now-known second branch.
         */
        if (bb_list_size(target->parents) != 1)
                return retval;
        insert_branch(target, insn, final);
        return 1;
}

static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
{
        if (simplify_phi_branch(bb, br))
                return 1;
        return simplify_branch_branch(bb, br, &br->bb_true, 1) |
               simplify_branch_branch(bb, br, &br->bb_false, 0);
}

static int simplify_branch_nodes(struct entrypoint *ep)
{
        int changed = 0;
        struct basic_block *bb;

        FOR_EACH_PTR(ep->bbs, bb) {
                struct instruction *br = last_instruction(bb->insns);

                if (!br || br->opcode != OP_CBR)
                        continue;
                changed |= simplify_one_branch(bb, br);
        } END_FOR_EACH_PTR(bb);
        return changed;
}

/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
        return simplify_branch_nodes(ep);
}

static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
        concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}

void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
        pseudo_t target;
        struct pseudo_user *pu;
        /*
         * Go through the "insn->users" list and replace them all..
         */
        target = insn->target;
        if (target == src)
                return;
        FOR_EACH_PTR(target->users, pu) {
                if (*pu->userp != VOID) {
                        assert(*pu->userp == target);
                        *pu->userp = src;
                }
        } END_FOR_EACH_PTR(pu);
        if (has_use_list(src))
                concat_user_list(target->users, &src->users);
        target->users = NULL;
}

void convert_load_instruction(struct instruction *insn, pseudo_t src)
{
        convert_instruction_target(insn, src);
        /* Turn the load into a no-op */
        insn->opcode = OP_LNOP;
        insn->bb = NULL;
}

static int overlapping_memop(struct instruction *a, struct instruction *b)
{
        unsigned int a_start = bytes_to_bits(a->offset);
        unsigned int b_start = bytes_to_bits(b->offset);
        unsigned int a_size = a->size;
        unsigned int b_size = b->size;

        if (a_size + a_start <= b_start)
                return 0;
        if (b_size + b_start <= a_start)
                return 0;
        return 1;
}
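
/*
 * A small worked example (offsets invented): with byte offsets converted
 * to bits above, a 4-byte access at byte offset 0 covers bits 0..31 and a
 * 2-byte access at byte offset 2 covers bits 16..31, so the two overlap;
 * a 2-byte access at byte offset 4 starts at bit 32 and does not.
 */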

static inline int same_memop(struct instruction *a, struct instruction *b)
{
        return a->offset == b->offset && a->size == b->size;
}

static inline int distinct_symbols(pseudo_t a, pseudo_t b)
{
        if (a->type != PSEUDO_SYM)
                return 0;
        if (b->type != PSEUDO_SYM)
                return 0;
        return a->sym != b->sym;
}

/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
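/*
 * Informally (an illustration, not an exhaustive list): a load or store
 * of the same pseudo with the same offset and size dominates the access
 * (1); an access to a provably different symbol does not (0); a call,
 * an overlapping store of a different size, or a possibly-aliasing
 * pointer access yields -1, "don't know" - except that for purely local
 * pseudos a call can be ignored and reports 0.
 */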
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
        int opcode = dom->opcode;

        if (opcode == OP_CALL || opcode == OP_ENTRY)
                return local ? 0 : -1;
        if (opcode != OP_LOAD && opcode != OP_STORE)
                return 0;
        if (dom->src != pseudo) {
                if (local)
                        return 0;
                /* We don't think two explicitly different symbols ever alias */
                if (distinct_symbols(insn->src, dom->src))
                        return 0;
                /* We could try to do some alias analysis here */
                return -1;
        }
        if (!same_memop(insn, dom)) {
                if (dom->opcode == OP_LOAD)
                        return 0;
                if (!overlapping_memop(insn, dom))
                        return 0;
                return -1;
        }
        return 1;
}

static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
{
        pseudo_t p;
        FOR_EACH_PTR(list, p) {
                if (p->def->bb == bb)
                        return 1;
        } END_FOR_EACH_PTR(p);

        return 0;
}

static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
        struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
        int local)
{
        struct basic_block *parent;

        if (!bb->parents)
                return !!local;

        FOR_EACH_PTR(bb->parents, parent) {
                struct instruction *one;
                struct instruction *br;
                pseudo_t phi;

                FOR_EACH_PTR_REVERSE(parent->insns, one) {
                        int dominance;
                        if (one == insn)
                                goto no_dominance;
                        dominance = dominates(pseudo, insn, one, local);
                        if (dominance < 0) {
                                if (one->opcode == OP_LOAD)
                                        continue;
                                return 0;
                        }
                        if (!dominance)
                                continue;
                        goto found_dominator;
                } END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
                if (parent->generation == generation)
                        continue;
                parent->generation = generation;

                if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
                        return 0;
                continue;

found_dominator:
                if (dominators && phisrc_in_bb(*dominators, parent))
                        continue;
                br = delete_last_instruction(&parent->insns);
                phi = alloc_phi(parent, one->target, one->size);
                phi->ident = phi->ident ? : pseudo->ident;
                add_instruction(&parent->insns, br);
                use_pseudo(insn, phi, add_pseudo(dominators, phi));
        } END_FOR_EACH_PTR(parent);
        return 1;
}

/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
        pseudo_t new, phi;

        /*
         * Check for somewhat common case of duplicate
         * phi nodes.
         */
        new = first_pseudo(dominators)->def->src1;
        FOR_EACH_PTR(dominators, phi) {
                if (new != phi->def->src1)
                        goto complex_phi;
                new->ident = new->ident ? : phi->ident;
        } END_FOR_EACH_PTR(phi);

        /*
         * All the same pseudo - mark the phi-nodes unused
         * and convert the load into a LNOP and replace the
         * pseudo.
         */
        FOR_EACH_PTR(dominators, phi) {
                kill_instruction(phi->def);
        } END_FOR_EACH_PTR(phi);
        convert_load_instruction(insn, new);
        return;

complex_phi:
        /* We leave symbol pseudos with a bogus usage list here */
        if (insn->src->type != PSEUDO_SYM)
                kill_use(&insn->src);
        insn->opcode = OP_PHI;
        insn->phi_list = dominators;
}
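
/*
 * Sketch of the two outcomes above, for a made-up local variable:
 *
 *     if (c)                  // store of %a  ->  phisrc %a
 *             x = a;
 *     else                    // store of %b  ->  phisrc %b
 *             x = b;
 *     use(x);                 // the load of x becomes a phi of the two
 *
 * If both branches had stored the same pseudo, the phi nodes would be
 * killed instead and the load simply replaced by that pseudo.
 */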

static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
        unsigned long generation, int local)
{
        struct basic_block *bb = insn->bb;
        struct instruction *one, *dom = NULL;
        struct pseudo_list *dominators;
        int partial;

        /* Unreachable load? Undo it */
        if (!bb) {
                insn->opcode = OP_LNOP;
                return 1;
        }

        partial = 0;
        FOR_EACH_PTR(bb->insns, one) {
                int dominance;
                if (one == insn)
                        goto found;
                dominance = dominates(pseudo, insn, one, local);
                if (dominance < 0) {
                        /* Ignore partial load dominators */
                        if (one->opcode == OP_LOAD)
                                continue;
                        dom = NULL;
                        partial = 1;
                        continue;
                }
                if (!dominance)
                        continue;
                dom = one;
                partial = 0;
        } END_FOR_EACH_PTR(one);
        /* Whaa? */
        warning(pseudo->sym->pos, "unable to find symbol read");
        return 0;
found:
        if (partial)
                return 0;

        if (dom) {
                convert_load_instruction(insn, dom->target);
                return 1;
        }

        /* OK, go find the parents */
        bb->generation = generation;

        dominators = NULL;
        if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
                return 0;

        /* This happens with initial assignments to structures etc.. */
        if (!dominators) {
                if (!local)
                        return 0;
                check_access(insn);
                convert_load_instruction(insn, value_pseudo(insn->type, 0));
                return 1;
        }

        /*
         * If we find just one dominating instruction, we
         * can turn it into a direct thing. Otherwise we'll
         * have to turn the load into a phi-node of the
         * dominators.
         */
        rewrite_load_instruction(insn, dominators);
        return 1;
}

static void kill_store(struct instruction *insn)
{
        if (insn) {
                insn->bb = NULL;
                insn->opcode = OP_SNOP;
                kill_use(&insn->target);
        }
}

/* Kill a pseudo that is dead on exit from the bb */
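/*
 * Roughly: starting from a block with no children and walking backwards,
 * any store to "pseudo" seen before a load of it is dead and gets
 * dropped; a load of the pseudo stops the walk, as does (for non-local
 * pseudos) a call or an access through a non-symbol pointer that might
 * alias.  The recursion only follows parents whose sole child is this
 * block.
 */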
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
        struct instruction *insn;
        struct basic_block *parent;

        if (bb->generation == generation)
                return;
        bb->generation = generation;
        FOR_EACH_PTR_REVERSE(bb->insns, insn) {
                int opcode = insn->opcode;

                if (opcode != OP_LOAD && opcode != OP_STORE) {
                        if (local)
                                continue;
                        if (opcode == OP_CALL)
                                return;
                        continue;
                }
                if (insn->src == pseudo) {
                        if (opcode == OP_LOAD)
                                return;
                        kill_store(insn);
                        continue;
                }
                if (local)
                        continue;
                if (insn->src->type != PSEUDO_SYM)
                        return;
        } END_FOR_EACH_PTR_REVERSE(insn);

        FOR_EACH_PTR(bb->parents, parent) {
                struct basic_block *child;
                FOR_EACH_PTR(parent->children, child) {
                        if (child && child != bb)
                                return;
                } END_FOR_EACH_PTR(child);
                kill_dead_stores(pseudo, generation, parent, local);
        } END_FOR_EACH_PTR(parent);
}

/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
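/*
 * E.g. (an invented case): given two stores "x = 1; x = 2;" to the same
 * location, with nothing in between that could read or clobber it, the
 * walk back from the second store finds the first one, sees that it is
 * fully dominated, and kills it.
 */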
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
        unsigned long generation, struct basic_block *bb, int local, int found)
{
        struct instruction *one;
        struct basic_block *parent;

        /* Unreachable store? Undo it */
        if (!bb) {
                kill_store(insn);
                return;
        }
        if (bb->generation == generation)
                return;
        bb->generation = generation;
        FOR_EACH_PTR_REVERSE(bb->insns, one) {
                int dominance;
                if (!found) {
                        if (one != insn)
                                continue;
                        found = 1;
                        continue;
                }
                dominance = dominates(pseudo, insn, one, local);
                if (!dominance)
                        continue;
                if (dominance < 0)
                        return;
                if (one->opcode == OP_LOAD)
                        return;
                kill_store(one);
        } END_FOR_EACH_PTR_REVERSE(one);

        if (!found) {
                warning(bb->pos, "Unable to find instruction");
                return;
        }

        FOR_EACH_PTR(bb->parents, parent) {
                struct basic_block *child;
                FOR_EACH_PTR(parent->children, child) {
                        if (child && child != bb)
                                return;
                } END_FOR_EACH_PTR(child);
                kill_dominated_stores(pseudo, insn, generation, parent, local, found);
        } END_FOR_EACH_PTR(parent);
}

void check_access(struct instruction *insn)
{
        pseudo_t pseudo = insn->src;

        if (insn->bb && pseudo->type == PSEUDO_SYM) {
                int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
                struct symbol *sym = pseudo->sym;

                if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
                        warning(insn->pos, "invalid access %s '%s' (%d %d)",
                                offset < 0 ? "below" : "past the end of",
                                show_ident(sym->ident), offset,
                                bits_to_bytes(sym->bit_size));
        }
}
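
/*
 * Example of the check (sizes illustrative): a 4-byte read at byte
 * offset 6 of "char buf[8]" ends at bit 48 + 32 = 80, past the 64-bit
 * size of the symbol, so this warns
 * "invalid access past the end of 'buf' (6 8)".
 */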

static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
        pseudo_t pseudo;
        struct pseudo_user *pu;
        unsigned long mod;
        int all;

        /* Never used as a symbol? */
        pseudo = sym->pseudo;
        if (!pseudo)
                return;

        /* We don't do coverage analysis of volatiles.. */
        if (sym->ctype.modifiers & MOD_VOLATILE)
                return;

        /* ..and symbols with external visibility need more care */
        mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
        if (mod)
                goto external_visibility;

        FOR_EACH_PTR(pseudo->users, pu) {
                /* We know that the symbol-pseudo use is the "src" in the instruction */
                struct instruction *insn = pu->insn;

                switch (insn->opcode) {
                case OP_STORE:
                        break;
                case OP_LOAD:
                        break;
                case OP_SYMADDR:
                        if (!insn->bb)
                                continue;
                        mod |= MOD_ADDRESSABLE;
                        goto external_visibility;
                case OP_NOP:
                case OP_SNOP:
                case OP_LNOP:
                case OP_PHI:
                        continue;
                default:
                        warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
                }
        } END_FOR_EACH_PTR(pu);

external_visibility:
        all = 1;
        FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
                struct instruction *insn = pu->insn;
                if (insn->opcode == OP_LOAD)
                        all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
        } END_FOR_EACH_PTR_REVERSE(pu);

        /* If we converted all the loads, remove the stores. They are dead */
        if (all && !mod) {
                FOR_EACH_PTR(pseudo->users, pu) {
                        struct instruction *insn = pu->insn;
                        if (insn->opcode == OP_STORE)
                                kill_store(insn);
                } END_FOR_EACH_PTR(pu);
        } else {
                /*
                 * If we couldn't take the shortcut, see if we can at least kill some
                 * of them..
                 */
                FOR_EACH_PTR(pseudo->users, pu) {
                        struct instruction *insn = pu->insn;
                        if (insn->opcode == OP_STORE)
                                kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
                } END_FOR_EACH_PTR(pu);

                if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
                        struct basic_block *bb;
                        FOR_EACH_PTR(ep->bbs, bb) {
                                if (!bb->children)
                                        kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
                        } END_FOR_EACH_PTR(bb);
                }
        }
}

void simplify_symbol_usage(struct entrypoint *ep)
{
        pseudo_t pseudo;

        FOR_EACH_PTR(ep->accesses, pseudo) {
                simplify_one_symbol(ep, pseudo->sym);
        } END_FOR_EACH_PTR(pseudo);
}

static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
        struct basic_block *child;

        if (bb->generation == generation)
                return;
        bb->generation = generation;
        FOR_EACH_PTR(bb->children, child) {
                mark_bb_reachable(child, generation);
        } END_FOR_EACH_PTR(child);
}

static void kill_defs(struct instruction *insn)
{
        pseudo_t target = insn->target;

        if (!has_use_list(target))
                return;
        if (target->def != insn)
                return;

        convert_instruction_target(insn, VOID);
}

void kill_bb(struct basic_block *bb)
{
        struct instruction *insn;
        struct basic_block *child, *parent;

        FOR_EACH_PTR(bb->insns, insn) {
                kill_instruction_force(insn);
                kill_defs(insn);
                /*
                 * We kill unreachable instructions even if they
                 * otherwise aren't "killable" (e.g. volatile loads)
                 */
        } END_FOR_EACH_PTR(insn);
        bb->insns = NULL;

        FOR_EACH_PTR(bb->children, child) {
                remove_bb_from_list(&child->parents, bb, 0);
        } END_FOR_EACH_PTR(child);
        bb->children = NULL;

        FOR_EACH_PTR(bb->parents, parent) {
                remove_bb_from_list(&parent->children, bb, 0);
        } END_FOR_EACH_PTR(parent);
        bb->parents = NULL;
}

void kill_unreachable_bbs(struct entrypoint *ep)
{
        struct basic_block *bb;
        unsigned long generation = ++bb_generation;

        mark_bb_reachable(ep->entry->bb, generation);
        FOR_EACH_PTR(ep->bbs, bb) {
                if (bb->generation == generation)
                        continue;
                /* Mark it as being dead */
                kill_bb(bb);
                bb->ep = NULL;
                DELETE_CURRENT_PTR(bb);
        } END_FOR_EACH_PTR(bb);
        PACK_PTR_LIST(&ep->bbs);
}

static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
        int changed = 0;
        struct instruction *insn = last_instruction(bb->insns);

        if (!insn)
                return 0;

        /* Infinite loops: let's not "optimize" them.. */
        if (old == new)
                return 0;

        switch (insn->opcode) {
        case OP_CBR:
                changed |= rewrite_branch(bb, &insn->bb_false, old, new);
                /* fall through */
        case OP_BR:
                changed |= rewrite_branch(bb, &insn->bb_true, old, new);
                assert(changed);
                return changed;
        case OP_SWITCH: {
                struct multijmp *jmp;
                FOR_EACH_PTR(insn->multijmp_list, jmp) {
                        changed |= rewrite_branch(bb, &jmp->target, old, new);
                } END_FOR_EACH_PTR(jmp);
                assert(changed);
                return changed;
        }
        default:
                return 0;
        }
}

static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
        struct basic_block *parent;
        struct basic_block *target = br->bb_true;
        struct basic_block *false = br->bb_false;

        if (br->opcode == OP_CBR) {
                pseudo_t cond = br->cond;
                if (cond->type != PSEUDO_VAL)
                        return NULL;
                target = cond->value ? target : false;
        }

        /*
         * We can't do FOR_EACH_PTR() here, because the parent list
         * may change when we rewrite the parent.
         */
        while ((parent = first_basic_block(bb->parents)) != NULL) {
                if (!rewrite_parent_branch(parent, bb, target))
                        return NULL;
        }
        return target;
}
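
/*
 * Rough example (invented): a block containing nothing but "br L2", or
 * "cbr $1, L2, L3" with a constant condition, can be bypassed - every
 * parent that branched to it is retargeted straight at L2.  If any
 * parent cannot be rewritten (e.g. the block branches to itself, or the
 * parent ends in something other than a branch or switch), the block is
 * left alone.
 */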

static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
        if (bb) {
                struct basic_block *tmp;
                int no_bb_in_list = 0;

                FOR_EACH_PTR(list, tmp) {
                        if (bb == tmp)
                                return;
                } END_FOR_EACH_PTR(tmp);
                assert(no_bb_in_list);
        }
}

static void vrfy_parents(struct basic_block *bb)
{
        struct basic_block *tmp;
        FOR_EACH_PTR(bb->parents, tmp) {
                vrfy_bb_in_list(bb, tmp->children);
        } END_FOR_EACH_PTR(tmp);
}

static void vrfy_children(struct basic_block *bb)
{
        struct basic_block *tmp;
        struct instruction *br = last_instruction(bb->insns);

        if (!br) {
                assert(!bb->children);
                return;
        }
        switch (br->opcode) {
                struct multijmp *jmp;
        case OP_CBR:
                vrfy_bb_in_list(br->bb_false, bb->children);
                /* fall through */
        case OP_BR:
                vrfy_bb_in_list(br->bb_true, bb->children);
                break;
        case OP_SWITCH:
        case OP_COMPUTEDGOTO:
                FOR_EACH_PTR(br->multijmp_list, jmp) {
                        vrfy_bb_in_list(jmp->target, bb->children);
                } END_FOR_EACH_PTR(jmp);
                break;
        default:
                break;
        }

        FOR_EACH_PTR(bb->children, tmp) {
                vrfy_bb_in_list(bb, tmp->parents);
        } END_FOR_EACH_PTR(tmp);
}

static void vrfy_bb_flow(struct basic_block *bb)
{
        vrfy_children(bb);
        vrfy_parents(bb);
}

void vrfy_flow(struct entrypoint *ep)
{
        struct basic_block *bb;
        struct basic_block *entry = ep->entry->bb;

        FOR_EACH_PTR(ep->bbs, bb) {
                if (bb == entry)
                        entry = NULL;
                vrfy_bb_flow(bb);
        } END_FOR_EACH_PTR(bb);
        assert(!entry);
}

void pack_basic_blocks(struct entrypoint *ep)
{
        struct basic_block *bb;

        /* See if we can merge a bb into another one.. */
        FOR_EACH_PTR(ep->bbs, bb) {
                struct instruction *first, *insn;
                struct basic_block *parent, *child, *last;

                if (!bb_reachable(bb))
                        continue;

                /*
                 * Just a branch?
                 */
                FOR_EACH_PTR(bb->insns, first) {
                        if (!first->bb)
                                continue;
                        switch (first->opcode) {
                        case OP_NOP: case OP_LNOP: case OP_SNOP:
                                continue;
                        case OP_CBR:
                        case OP_BR: {
                                struct basic_block *replace;
                                replace = rewrite_branch_bb(bb, first);
                                if (replace) {
                                        kill_bb(bb);
                                        goto no_merge;
                                }
                        }
                        /* fallthrough */
                        default:
                                goto out;
                        }
                } END_FOR_EACH_PTR(first);

out:
                /*
                 * See if we only have one parent..
                 */
                last = NULL;
                FOR_EACH_PTR(bb->parents, parent) {
                        if (last) {
                                if (last != parent)
                                        goto no_merge;
                                continue;
                        }
                        last = parent;
                } END_FOR_EACH_PTR(parent);

                parent = last;
                if (!parent || parent == bb)
                        continue;

                /*
                 * Goodie. See if the parent can merge..
                 */
                FOR_EACH_PTR(parent->children, child) {
                        if (child != bb)
                                goto no_merge;
                } END_FOR_EACH_PTR(child);

                /*
                 * Merge the two.
                 */
                repeat_phase |= REPEAT_CSE;

                parent->children = bb->children;
                bb->children = NULL;
                bb->parents = NULL;

                FOR_EACH_PTR(parent->children, child) {
                        replace_bb_in_list(&child->parents, bb, parent, 0);
                } END_FOR_EACH_PTR(child);

                kill_instruction(delete_last_instruction(&parent->insns));
                FOR_EACH_PTR(bb->insns, insn) {
                        if (insn->bb) {
                                assert(insn->bb == bb);
                                insn->bb = parent;
                        }
                        add_instruction(&parent->insns, insn);
                } END_FOR_EACH_PTR(insn);
                bb->insns = NULL;

        no_merge:
                /* nothing to do */;
        } END_FOR_EACH_PTR(bb);
}
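
/*
 * A condensed illustration of the merge case at the bottom of
 * pack_basic_blocks() (block names invented): if A's only child is B and
 * B's only parent is A, drop A's final branch, append B's instructions
 * to A, and splice B's children onto A.  The earlier part of the loop
 * instead handles blocks that are nothing but a branch, by letting their
 * parents jump past them.
 */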