/*
 * Example of how to write a compiler with sparse
 */
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <assert.h>

#include "symbol.h"
#include "expression.h"
#include "linearize.h"
#include "flow.h"
#include "storage.h"
#include "target.h"

static const char *opcodes[] = {
	[OP_BADOP] = "bad_op",

	/* Fn entrypoint */
	[OP_ENTRY] = "<entry-point>",

	/* Terminator */
	[OP_RET] = "ret",
	[OP_BR] = "br",
	[OP_CBR] = "cbr",
	[OP_SWITCH] = "switch",
	[OP_INVOKE] = "invoke",
	[OP_COMPUTEDGOTO] = "jmp *",
	[OP_UNWIND] = "unwind",

	/* Binary */
	[OP_ADD] = "add",
	[OP_SUB] = "sub",
	[OP_MULU] = "mulu",
	[OP_MULS] = "muls",
	[OP_DIVU] = "divu",
	[OP_DIVS] = "divs",
	[OP_MODU] = "modu",
	[OP_MODS] = "mods",
	[OP_SHL] = "shl",
	[OP_LSR] = "lsr",
	[OP_ASR] = "asr",

	/* Logical */
	[OP_AND] = "and",
	[OP_OR] = "or",
	[OP_XOR] = "xor",
	[OP_AND_BOOL] = "and-bool",
	[OP_OR_BOOL] = "or-bool",

	/* Binary comparison */
	[OP_SET_EQ] = "seteq",
	[OP_SET_NE] = "setne",
	[OP_SET_LE] = "setle",
	[OP_SET_GE] = "setge",
	[OP_SET_LT] = "setlt",
	[OP_SET_GT] = "setgt",
	[OP_SET_B] = "setb",
	[OP_SET_A] = "seta",
	[OP_SET_BE] = "setbe",
	[OP_SET_AE] = "setae",

	/* Uni */
	[OP_NOT] = "not",
	[OP_NEG] = "neg",

	/* Special three-input */
	[OP_SEL] = "select",

	/* Memory */
	[OP_MALLOC] = "malloc",
	[OP_FREE] = "free",
	[OP_ALLOCA] = "alloca",
	[OP_LOAD] = "load",
	[OP_STORE] = "store",
	[OP_SETVAL] = "set",
	[OP_GET_ELEMENT_PTR] = "getelem",

	/* Other */
	[OP_PHI] = "phi",
	[OP_PHISOURCE] = "phisrc",
	[OP_COPY] = "copy",
	[OP_CAST] = "cast",
	[OP_SCAST] = "scast",
	[OP_FPCAST] = "fpcast",
	[OP_PTRCAST] = "ptrcast",
	[OP_CALL] = "call",
	[OP_VANEXT] = "va_next",
	[OP_VAARG] = "va_arg",
	[OP_SLICE] = "slice",
	[OP_SNOP] = "snop",
	[OP_LNOP] = "lnop",
	[OP_NOP] = "nop",
	[OP_DEATHNOTE] = "dead",
	[OP_ASM] = "asm",

	/* Sparse tagging (line numbers, context, whatever) */
	[OP_CONTEXT] = "context",
};

static int last_reg, stack_offset;

struct hardreg {
	const char *name;
	struct pseudo_list *contains;
	unsigned busy:16,
		 dead:8,
		 used:1;
};

#define TAG_DEAD 1
#define TAG_DIRTY 2

/* Our "switch" generation is very very stupid. */
#define SWITCH_REG (1)

static void output_bb(struct basic_block *bb, unsigned long generation);

/*
 * We only know about the caller-clobbered registers
 * right now.
 */
static struct hardreg hardregs[] = {
	{ .name = "%eax" },
	{ .name = "%edx" },
	{ .name = "%ecx" },
	{ .name = "%ebx" },
	{ .name = "%esi" },
	{ .name = "%edi" },

	{ .name = "%ebp" },
	{ .name = "%esp" },
};
#define REGNO 6
#define REG_EBP 6
#define REG_ESP 7
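
/*
 * Note: only the first REGNO (6) entries above take part in register
 * allocation; %ebp and %esp stay reserved as the frame and stack pointers
 * and only ever show up as base registers when addresses are formed
 * (see get_generic_operand() below).
 */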

struct bb_state {
	struct position pos;
	struct storage_hash_list *inputs;
	struct storage_hash_list *outputs;
	struct storage_hash_list *internal;

	/* CC cache.. */
	int cc_opcode, cc_dead;
	pseudo_t cc_target;
};

enum optype {
	OP_UNDEF,
	OP_REG,
	OP_VAL,
	OP_MEM,
	OP_ADDR,
};

struct operand {
	enum optype type;
	int size;
	union {
		struct hardreg *reg;
		long long value;
		struct /* OP_MEM and OP_ADDR */ {
			unsigned int offset;
			unsigned int scale;
			struct symbol *sym;
			struct hardreg *base;
			struct hardreg *index;
		};
	};
};

static const char *show_op(struct bb_state *state, struct operand *op)
{
	/* Four rotating 256-byte buffers, so a few results can be live at once */
	static char buf[4][256];
	static int bufnr;
	char *p, *ret;
	int nr;

	nr = (bufnr + 1) & 3;
	bufnr = nr;
	ret = p = buf[nr];

	switch (op->type) {
	case OP_UNDEF:
		return "undef";
	case OP_REG:
		return op->reg->name;
	case OP_VAL:
		sprintf(p, "$%lld", op->value);
		break;
	case OP_MEM:
	case OP_ADDR:
		if (op->offset)
			p += sprintf(p, "%d", op->offset);
		if (op->sym)
			p += sprintf(p, "%s%s",
				op->offset ? "+" : "",
				show_ident(op->sym->ident));
		if (op->base || op->index) {
			p += sprintf(p, "(%s%s%s",
				op->base ? op->base->name : "",
				(op->base && op->index) ? "," : "",
				op->index ? op->index->name : "");
			if (op->scale > 1)
				p += sprintf(p, ",%d", op->scale);
			*p++ = ')';
			*p = '\0';
		}
		break;
	}
	return ret;
}

static struct storage_hash *find_storage_hash(pseudo_t pseudo, struct storage_hash_list *list)
{
	struct storage_hash *entry;
	FOR_EACH_PTR(list, entry) {
		if (entry->pseudo == pseudo)
			return entry;
	} END_FOR_EACH_PTR(entry);
	return NULL;
}

static struct storage_hash *find_or_create_hash(pseudo_t pseudo, struct storage_hash_list **listp)
{
	struct storage_hash *entry;

	entry = find_storage_hash(pseudo, *listp);
	if (!entry) {
		entry = alloc_storage_hash(alloc_storage());
		entry->pseudo = pseudo;
		add_ptr_list(listp, entry);
	}
	return entry;
}

/* Eventually we should just build it up in memory */
static void FORMAT_ATTR(2) output_line(struct bb_state *state, const char *fmt, ...)
{
	va_list args;

	va_start(args, fmt);
	vprintf(fmt, args);
	va_end(args);
}

static void FORMAT_ATTR(2) output_label(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "%s:\n", buffer);
}

static void FORMAT_ATTR(2) output_insn(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "\t%s\n", buffer);
}

#define output_insn(state, fmt, arg...) \
	output_insn(state, fmt "\t\t# %s" , ## arg , __FUNCTION__)
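
/*
 * From here on, every output_insn() call goes through the macro above,
 * which tags the emitted instruction with the name of the generating C
 * function as an assembler comment, making it easy to trace output back
 * to the code that produced it.
 */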

static void FORMAT_ATTR(2) output_comment(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	if (!verbose)
		return;
	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "\t# %s\n", buffer);
}

static const char *show_memop(struct storage *storage)
{
	static char buffer[1000];

	if (!storage)
		return "undef";
	switch (storage->type) {
	case REG_FRAME:
		sprintf(buffer, "%d(FP)", storage->offset);
		break;
	case REG_STACK:
		sprintf(buffer, "%d(SP)", storage->offset);
		break;
	case REG_REG:
		return hardregs[storage->regno].name;
	default:
		return show_storage(storage);
	}
	return buffer;
}

static int alloc_stack_offset(int size)
{
	int ret = stack_offset;
	stack_offset = ret + size;
	return ret;
}

static void alloc_stack(struct bb_state *state, struct storage *storage)
{
	storage->type = REG_STACK;
	storage->offset = alloc_stack_offset(4);
}

/*
 * Can we re-generate the pseudo, so that we don't need to
 * flush it to memory? We can regenerate:
 *  - immediates and symbol addresses
 *  - pseudos we got as input in non-registers
 *  - pseudos we've already saved off earlier..
 */
static int can_regenerate(struct bb_state *state, pseudo_t pseudo)
{
	struct storage_hash *in;

	switch (pseudo->type) {
	case PSEUDO_VAL:
	case PSEUDO_SYM:
		return 1;

	default:
		in = find_storage_hash(pseudo, state->inputs);
		if (in && in->storage->type != REG_REG)
			return 1;
		in = find_storage_hash(pseudo, state->internal);
		if (in)
			return 1;
	}
	return 0;
}

static void flush_one_pseudo(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
{
	struct storage_hash *out;
	struct storage *storage;

	if (can_regenerate(state, pseudo))
		return;

	output_comment(state, "flushing %s from %s", show_pseudo(pseudo), hardreg->name);
	out = find_storage_hash(pseudo, state->internal);
	if (!out) {
		out = find_storage_hash(pseudo, state->outputs);
		if (!out)
			out = find_or_create_hash(pseudo, &state->internal);
	}
	storage = out->storage;
	switch (storage->type) {
	default:
		/*
		 * Aieee - the next user wants it in a register, but we
		 * need to flush it to memory in between. Which means that
		 * we need to allocate an internal one, dammit..
		 */
		out = find_or_create_hash(pseudo, &state->internal);
		storage = out->storage;
		/* Fall through */
	case REG_UDEF:
		alloc_stack(state, storage);
		/* Fall through */
	case REG_STACK:
		output_insn(state, "movl %s,%s", hardreg->name, show_memop(storage));
		break;
	}
}

/* Flush a hardreg out to the storage it has.. */
static void flush_reg(struct bb_state *state, struct hardreg *reg)
{
	pseudo_t pseudo;

	if (reg->busy)
		output_comment(state, "reg %s flushed while busy is %d!", reg->name, reg->busy);
	if (!reg->contains)
		return;
	reg->dead = 0;
	reg->used = 1;
	FOR_EACH_PTR(reg->contains, pseudo) {
		if (CURRENT_TAG(pseudo) & TAG_DEAD)
			continue;
		if (!(CURRENT_TAG(pseudo) & TAG_DIRTY))
			continue;
		flush_one_pseudo(state, reg, pseudo);
	} END_FOR_EACH_PTR(pseudo);
	free_ptr_list(&reg->contains);
}

static struct storage_hash *find_pseudo_storage(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	struct storage_hash *src;

	src = find_storage_hash(pseudo, state->internal);
	if (!src) {
		src = find_storage_hash(pseudo, state->inputs);
		if (!src) {
			src = find_storage_hash(pseudo, state->outputs);
			/* Undefined? Screw it! */
			if (!src)
				return NULL;

			/*
			 * If we found output storage, it had better be local stack
			 * that we flushed to earlier..
			 */
			if (src->storage->type != REG_STACK)
				return NULL;
		}
	}

	/*
	 * Incoming pseudo without any pre-set storage allocation?
	 * We can make up our own, and obviously prefer to get it
	 * in the register we already selected (if it hasn't been
	 * used yet).
	 */
	if (src->storage->type == REG_UDEF) {
		if (reg && !reg->used) {
			src->storage->type = REG_REG;
			src->storage->regno = reg - hardregs;
			return NULL;
		}
		alloc_stack(state, src->storage);
	}
	return src;
}

static void mark_reg_dead(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	pseudo_t p;

	FOR_EACH_PTR(reg->contains, p) {
		if (p != pseudo)
			continue;
		if (CURRENT_TAG(p) & TAG_DEAD)
			continue;
		output_comment(state, "marking pseudo %s in reg %s dead", show_pseudo(pseudo), reg->name);
		TAG_CURRENT(p, TAG_DEAD);
		reg->dead++;
	} END_FOR_EACH_PTR(p);
}

static void add_pseudo_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	output_comment(state, "added pseudo %s to reg %s", show_pseudo(pseudo), reg->name);
	add_ptr_list_tag(&reg->contains, pseudo, TAG_DIRTY);
}

static struct hardreg *preferred_reg(struct bb_state *state, pseudo_t target)
{
	struct storage_hash *dst;

	dst = find_storage_hash(target, state->outputs);
	if (dst) {
		struct storage *storage = dst->storage;
		if (storage->type == REG_REG)
			return hardregs + storage->regno;
	}
	return NULL;
}

static struct hardreg *empty_reg(struct bb_state *state)
{
	int i;
	struct hardreg *reg = hardregs;

	for (i = 0; i < REGNO; i++, reg++) {
		if (!reg->contains)
			return reg;
	}
	return NULL;
}
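
/*
 * Pick a register for a pseudo: prefer the register its eventual output
 * storage wants, then any completely empty register, and as a last resort
 * walk round-robin from last_reg and evict (flush) the first register
 * that is not currently busy.
 */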
static struct hardreg *target_reg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
{
	int i;
	int unable_to_find_reg = 0;
	struct hardreg *reg;

	/* First, see if we have a preferred target register.. */
	reg = preferred_reg(state, target);
	if (reg && !reg->contains)
		goto found;

	reg = empty_reg(state);
	if (reg)
		goto found;

	i = last_reg;
	do {
		i++;
		if (i >= REGNO)
			i = 0;
		reg = hardregs + i;
		if (!reg->busy) {
			flush_reg(state, reg);
			last_reg = i;
			goto found;
		}
	} while (i != last_reg);
	assert(unable_to_find_reg);

found:
	add_pseudo_reg(state, pseudo, reg);
	return reg;
}

static struct hardreg *find_in_reg(struct bb_state *state, pseudo_t pseudo)
{
	int i;
	struct hardreg *reg;

	for (i = 0; i < REGNO; i++) {
		pseudo_t p;

		reg = hardregs + i;
		FOR_EACH_PTR(reg->contains, p) {
			if (p == pseudo) {
				last_reg = i;
				output_comment(state, "found pseudo %s in reg %s (busy=%d)", show_pseudo(pseudo), reg->name, reg->busy);
				return reg;
			}
		} END_FOR_EACH_PTR(p);
	}
	return NULL;
}

static void flush_pseudo(struct bb_state *state, pseudo_t pseudo, struct storage *storage)
{
	struct hardreg *reg = find_in_reg(state, pseudo);

	if (reg)
		flush_reg(state, reg);
}

static void flush_cc_cache_to_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	int opcode = state->cc_opcode;

	state->cc_opcode = 0;
	state->cc_target = NULL;
	output_insn(state, "%s %s", opcodes[opcode], reg->name);
}

static void flush_cc_cache(struct bb_state *state)
{
	pseudo_t pseudo = state->cc_target;

	if (pseudo) {
		struct hardreg *dst;

		state->cc_target = NULL;

		if (!state->cc_dead) {
			dst = target_reg(state, pseudo, pseudo);
			flush_cc_cache_to_reg(state, pseudo, dst);
		}
	}
}

static void add_cc_cache(struct bb_state *state, int opcode, pseudo_t pseudo)
{
	assert(!state->cc_target);
	state->cc_target = pseudo;
	state->cc_opcode = opcode;
	state->cc_dead = 0;
	output_comment(state, "caching %s", opcodes[opcode]);
}
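
/*
 * The three functions above implement a tiny condition-code cache: a
 * compare does not immediately materialize its 0/1 result, it just
 * records the pseudo and the set<cc> opcode in cc_target/cc_opcode.
 * A following conditional branch or select can then use the flags
 * directly (e.g. "je"/"jne" straight after the cmp), and
 * flush_cc_cache() only emits the set<cc> into a real register when
 * some other use forces it.
 */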

/* Fill a hardreg with the pseudo it has */
static struct hardreg *fill_reg(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
{
	struct storage_hash *src;
	struct instruction *def;

	if (state->cc_target == pseudo) {
		flush_cc_cache_to_reg(state, pseudo, hardreg);
		return hardreg;
	}

	switch (pseudo->type) {
	case PSEUDO_VAL:
		output_insn(state, "movl $%lld,%s", pseudo->value, hardreg->name);
		break;
	case PSEUDO_SYM:
		src = find_pseudo_storage(state, pseudo, NULL);
		/* Static thing? */
		if (!src) {
			output_insn(state, "movl $<%s>,%s", show_pseudo(pseudo), hardreg->name);
			break;
		}
		switch (src->storage->type) {
		case REG_REG:
			/* Aiaiaiaiaii! Need to flush it to temporary memory */
			src = find_or_create_hash(pseudo, &state->internal);
			/* Fall through */
		default:
			alloc_stack(state, src->storage);
			/* Fall through */
		case REG_STACK:
		case REG_FRAME:
			flush_pseudo(state, pseudo, src->storage);
			output_insn(state, "leal %s,%s", show_memop(src->storage), hardreg->name);
			break;
		}
		break;
	case PSEUDO_ARG:
	case PSEUDO_REG:
		def = pseudo->def;
		if (def && def->opcode == OP_SETVAL) {
			output_insn(state, "movl $<%s>,%s", show_pseudo(def->target), hardreg->name);
			break;
		}
		src = find_pseudo_storage(state, pseudo, hardreg);
		if (!src)
			break;
		if (src->flags & TAG_DEAD)
			mark_reg_dead(state, pseudo, hardreg);
		output_insn(state, "mov.%d %s,%s", 32, show_memop(src->storage), hardreg->name);
		break;
	default:
		output_insn(state, "reload %s from %s", hardreg->name, show_pseudo(pseudo));
		break;
	}
	return hardreg;
}

static struct hardreg *getreg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
{
	struct hardreg *reg;

	reg = find_in_reg(state, pseudo);
	if (reg)
		return reg;
	reg = target_reg(state, pseudo, target);
	return fill_reg(state, reg, pseudo);
}

static void move_reg(struct bb_state *state, struct hardreg *src, struct hardreg *dst)
{
	output_insn(state, "movl %s,%s", src->name, dst->name);
}

static struct hardreg *copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
{
	int i;
	struct hardreg *reg;

	/* If the container has been killed off, just re-use it */
	if (!src->contains)
		return src;

	/* If "src" only has one user, and the contents are dead, we can re-use it */
	if (src->busy == 1 && src->dead == 1)
		return src;

	reg = preferred_reg(state, target);
	if (reg && !reg->contains) {
		output_comment(state, "copying %s to preferred target %s", show_pseudo(target), reg->name);
		move_reg(state, src, reg);
		return reg;
	}

	for (i = 0; i < REGNO; i++) {
		reg = hardregs + i;
		if (!reg->contains) {
			output_comment(state, "copying %s to %s", show_pseudo(target), reg->name);
			output_insn(state, "movl %s,%s", src->name, reg->name);
			return reg;
		}
	}

	flush_reg(state, src);
	return src;
}

static void put_operand(struct bb_state *state, struct operand *op)
{
	switch (op->type) {
	case OP_REG:
		op->reg->busy--;
		break;
	case OP_ADDR:
	case OP_MEM:
		if (op->base)
			op->base->busy--;
		if (op->index)
			op->index->busy--;
		break;
	default:
		break;
	}
}

static struct operand *alloc_op(void)
{
	struct operand *op = malloc(sizeof(*op));
	memset(op, 0, sizeof(*op));
	return op;
}

static struct operand *get_register_operand(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
{
	struct operand *op = alloc_op();
	op->type = OP_REG;
	op->reg = getreg(state, pseudo, target);
	op->reg->busy++;
	return op;
}

static int get_sym_frame_offset(struct bb_state *state, pseudo_t pseudo)
{
	int offset = pseudo->nr;
	if (offset < 0) {
		offset = alloc_stack_offset(4);
		pseudo->nr = offset;
	}
	return offset;
}

static struct operand *get_generic_operand(struct bb_state *state, pseudo_t pseudo)
{
	struct hardreg *reg;
	struct storage *src;
	struct storage_hash *hash;
	struct operand *op = malloc(sizeof(*op));

	memset(op, 0, sizeof(*op));
	switch (pseudo->type) {
	case PSEUDO_VAL:
		op->type = OP_VAL;
		op->value = pseudo->value;
		break;

	case PSEUDO_SYM: {
		struct symbol *sym = pseudo->sym;
		op->type = OP_ADDR;
		if (sym->ctype.modifiers & MOD_NONLOCAL) {
			op->sym = sym;
			break;
		}
		op->base = hardregs + REG_EBP;
		op->offset = get_sym_frame_offset(state, pseudo);
		break;
	}

	default:
		reg = find_in_reg(state, pseudo);
		if (reg) {
			op->type = OP_REG;
			op->reg = reg;
			reg->busy++;
			break;
		}
		hash = find_pseudo_storage(state, pseudo, NULL);
		if (!hash)
			break;
		src = hash->storage;
		switch (src->type) {
		case REG_REG:
			op->type = OP_REG;
			op->reg = hardregs + src->regno;
			op->reg->busy++;
			break;
		case REG_FRAME:
			op->type = OP_MEM;
			op->offset = src->offset;
			op->base = hardregs + REG_EBP;
			break;
		case REG_STACK:
			op->type = OP_MEM;
			op->offset = src->offset;
			op->base = hardregs + REG_ESP;
			break;
		default:
			break;
		}
	}
	return op;
}

/* Callers should be made to use the proper "operand" formats */
static const char *generic(struct bb_state *state, pseudo_t pseudo)
{
	struct hardreg *reg;
	struct operand *op = get_generic_operand(state, pseudo);
	static char buf[100];
	const char *str;

	switch (op->type) {
	case OP_ADDR:
		if (!op->offset && op->base && !op->sym)
			return op->base->name;
		if (op->sym && !op->base) {
			int len = sprintf(buf, "$ %s", show_op(state, op));
			if (op->offset)
				sprintf(buf + len, " + %d", op->offset);
			return buf;
		}
		str = show_op(state, op);
		put_operand(state, op);
		reg = target_reg(state, pseudo, NULL);
		output_insn(state, "lea %s,%s", show_op(state, op), reg->name);
		return reg->name;

	default:
		str = show_op(state, op);
	}
	put_operand(state, op);
	return str;
}

static struct operand *get_address_operand(struct bb_state *state, struct instruction *memop)
{
	struct hardreg *base;
	struct operand *op = get_generic_operand(state, memop->src);

	switch (op->type) {
	case OP_ADDR:
		op->offset += memop->offset;
		break;
	default:
		put_operand(state, op);
		base = getreg(state, memop->src, NULL);
		op->type = OP_ADDR;
		op->base = base;
		base->busy++;
		op->offset = memop->offset;
		op->sym = NULL;
	}
	return op;
}

static const char *address(struct bb_state *state, struct instruction *memop)
{
	struct operand *op = get_address_operand(state, memop);
	const char *str = show_op(state, op);
	put_operand(state, op);
	return str;
}

static const char *reg_or_imm(struct bb_state *state, pseudo_t pseudo)
{
	switch(pseudo->type) {
	case PSEUDO_VAL:
		return show_pseudo(pseudo);
	default:
		return getreg(state, pseudo, NULL)->name;
	}
}

static void kill_dead_reg(struct hardreg *reg)
{
	if (reg->dead) {
		pseudo_t p;

		FOR_EACH_PTR(reg->contains, p) {
			if (CURRENT_TAG(p) & TAG_DEAD) {
				DELETE_CURRENT_PTR(p);
				reg->dead--;
			}
		} END_FOR_EACH_PTR(p);
		PACK_PTR_LIST(&reg->contains);
		assert(!reg->dead);
	}
}

static struct hardreg *target_copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
{
	kill_dead_reg(src);
	return copy_reg(state, src, target);
}
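
/*
 * Binops are lowered in x86 two-address style: the first source is
 * copied (if needed) into the register that will hold the result, and
 * the second source is then applied to it as "op.size src2,dst".
 */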
static void do_binop(struct bb_state *state, struct instruction *insn, pseudo_t val1, pseudo_t val2)
{
	const char *op = opcodes[insn->opcode];
	struct operand *src = get_register_operand(state, val1, insn->target);
	struct operand *src2 = get_generic_operand(state, val2);
	struct hardreg *dst;

	dst = target_copy_reg(state, src->reg, insn->target);
	output_insn(state, "%s.%d %s,%s", op, insn->size, show_op(state, src2), dst->name);
	put_operand(state, src);
	put_operand(state, src2);
	add_pseudo_reg(state, insn->target, dst);
}

static void generate_binop(struct bb_state *state, struct instruction *insn)
{
	flush_cc_cache(state);
	do_binop(state, insn, insn->src1, insn->src2);
}

static int is_dead_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	pseudo_t p;
	FOR_EACH_PTR(reg->contains, p) {
		if (p == pseudo)
			return CURRENT_TAG(p) & TAG_DEAD;
	} END_FOR_EACH_PTR(p);
	return 0;
}

/*
 * Commutative binops are much more flexible, since we can switch the
 * sources around to satisfy the target register, or to avoid having
 * to load one of them into a register..
 */
static void generate_commutative_binop(struct bb_state *state, struct instruction *insn)
{
	pseudo_t src1, src2;
	struct hardreg *reg1, *reg2;

	flush_cc_cache(state);
	src1 = insn->src1;
	src2 = insn->src2;
	reg2 = find_in_reg(state, src2);
	if (!reg2)
		goto dont_switch;
	reg1 = find_in_reg(state, src1);
	if (!reg1)
		goto do_switch;
	if (!is_dead_reg(state, src2, reg2))
		goto dont_switch;
	if (!is_dead_reg(state, src1, reg1))
		goto do_switch;

	/* Both are dead. Is one preferable? */
	if (reg2 != preferred_reg(state, insn->target))
		goto dont_switch;

do_switch:
	src1 = src2;
	src2 = insn->src1;
dont_switch:
	do_binop(state, insn, src1, src2);
}

/*
 * This marks a pseudo dead. It still stays on the hardreg list (the hardreg
 * still has its value), but it's scheduled to be killed after the next
 * "sequence point" when we call "kill_dead_pseudos()"
 */
static void mark_pseudo_dead(struct bb_state *state, pseudo_t pseudo)
{
	int i;
	struct storage_hash *src;

	if (state->cc_target == pseudo)
		state->cc_dead = 1;
	src = find_pseudo_storage(state, pseudo, NULL);
	if (src)
		src->flags |= TAG_DEAD;
	for (i = 0; i < REGNO; i++)
		mark_reg_dead(state, pseudo, hardregs + i);
}

static void kill_dead_pseudos(struct bb_state *state)
{
	int i;

	for (i = 0; i < REGNO; i++) {
		kill_dead_reg(hardregs + i);
	}
}

static void generate_store(struct instruction *insn, struct bb_state *state)
{
	output_insn(state, "mov.%d %s,%s", insn->size, reg_or_imm(state, insn->target), address(state, insn));
}

static void generate_load(struct instruction *insn, struct bb_state *state)
{
	const char *input = address(state, insn);
	struct hardreg *dst;

	kill_dead_pseudos(state);
	dst = target_reg(state, insn->target, NULL);
	output_insn(state, "mov.%d %s,%s", insn->size, input, dst->name);
}

static void kill_pseudo(struct bb_state *state, pseudo_t pseudo)
{
	int i;
	struct hardreg *reg;

	output_comment(state, "killing pseudo %s", show_pseudo(pseudo));
	for (i = 0; i < REGNO; i++) {
		pseudo_t p;

		reg = hardregs + i;
		FOR_EACH_PTR(reg->contains, p) {
			if (p != pseudo)
				continue;
			if (CURRENT_TAG(p) & TAG_DEAD)
				reg->dead--;
			output_comment(state, "removing pseudo %s from reg %s",
				show_pseudo(pseudo), reg->name);
			DELETE_CURRENT_PTR(p);
		} END_FOR_EACH_PTR(p);
		PACK_PTR_LIST(&reg->contains);
	}
}

static void generate_copy(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *src = getreg(state, insn->src, insn->target);
	kill_pseudo(state, insn->target);
	add_pseudo_reg(state, insn->target, src);
}

static void generate_cast(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *src = getreg(state, insn->src, insn->target);
	struct hardreg *dst;
	unsigned int old = insn->orig_type ? insn->orig_type->bit_size : 0;
	unsigned int new = insn->size;

	/*
	 * Cast to smaller type? Ignore the high bits, we
	 * just keep both pseudos in the same register.
	 */
	if (old >= new) {
		add_pseudo_reg(state, insn->target, src);
		return;
	}

	dst = target_copy_reg(state, src, insn->target);

	if (insn->orig_type && (insn->orig_type->ctype.modifiers & MOD_SIGNED)) {
		output_insn(state, "sext.%d.%d %s", old, new, dst->name);
	} else {
		unsigned long long mask;
		mask = ~(~0ULL << old);
		mask &= ~(~0ULL << new);
		output_insn(state, "andl.%d $%#llx,%s", insn->size, mask, dst->name);
	}
	add_pseudo_reg(state, insn->target, dst);
}

static void generate_output_storage(struct bb_state *state);

static const char *conditional[] = {
	[OP_SET_EQ] = "e",
	[OP_SET_NE] = "ne",
	[OP_SET_LE] = "le",
	[OP_SET_GE] = "ge",
	[OP_SET_LT] = "lt",
	[OP_SET_GT] = "gt",
	[OP_SET_B] = "b",
	[OP_SET_A] = "a",
	[OP_SET_BE] = "be",
	[OP_SET_AE] = "ae"
};

static void generate_branch(struct bb_state *state, struct instruction *br)
{
	const char *cond = "XXX";
	struct basic_block *target;

	if (br->cond) {
		if (state->cc_target == br->cond) {
			cond = conditional[state->cc_opcode];
		} else {
			struct hardreg *reg = getreg(state, br->cond, NULL);
			output_insn(state, "testl %s,%s", reg->name, reg->name);
			cond = "ne";
		}
	}
	generate_output_storage(state);
	target = br->bb_true;
	if (br->cond) {
		output_insn(state, "j%s .L%p", cond, target);
		target = br->bb_false;
	}
	output_insn(state, "jmp .L%p", target);
}

/* We've made sure that there is a dummy reg live for the output */
static void generate_switch(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *reg = hardregs + SWITCH_REG;

	generate_output_storage(state);
	output_insn(state, "switch on %s", reg->name);
	output_insn(state, "unimplemented: %s", show_instruction(insn));
}

static void generate_ret(struct bb_state *state, struct instruction *ret)
{
	if (ret->src && ret->src != VOID) {
		struct hardreg *wants = hardregs+0;
		struct hardreg *reg = getreg(state, ret->src, NULL);
		if (reg != wants)
			output_insn(state, "movl %s,%s", reg->name, wants->name);
	}
	output_insn(state, "ret");
}

/*
 * Fake "call" linearization just as a taster..
 */
1121 */ 1122 static void generate_call(struct bb_state *state, struct instruction *insn) 1123 { 1124 int offset = 0; 1125 pseudo_t arg; 1126 1127 FOR_EACH_PTR(insn->arguments, arg) { 1128 output_insn(state, "pushl %s", generic(state, arg)); 1129 offset += 4; 1130 } END_FOR_EACH_PTR(arg); 1131 flush_reg(state, hardregs+0); 1132 flush_reg(state, hardregs+1); 1133 flush_reg(state, hardregs+2); 1134 output_insn(state, "call %s", show_pseudo(insn->func)); 1135 if (offset) 1136 output_insn(state, "addl $%d,%%esp", offset); 1137 if (insn->target && insn->target != VOID) 1138 add_pseudo_reg(state, insn->target, hardregs+0); 1139 } 1140 1141 static void generate_select(struct bb_state *state, struct instruction *insn) 1142 { 1143 const char *cond; 1144 struct hardreg *src1, *src2, *dst; 1145 1146 src1 = getreg(state, insn->src2, NULL); 1147 dst = copy_reg(state, src1, insn->target); 1148 add_pseudo_reg(state, insn->target, dst); 1149 src2 = getreg(state, insn->src3, insn->target); 1150 1151 if (state->cc_target == insn->src1) { 1152 cond = conditional[state->cc_opcode]; 1153 } else { 1154 struct hardreg *reg = getreg(state, insn->src1, NULL); 1155 output_insn(state, "testl %s,%s", reg->name, reg->name); 1156 cond = "ne"; 1157 } 1158 1159 output_insn(state, "sel%s %s,%s", cond, src2->name, dst->name); 1160 } 1161 1162 struct asm_arg { 1163 const struct ident *name; 1164 const char *value; 1165 pseudo_t pseudo; 1166 struct hardreg *reg; 1167 }; 1168 1169 static void replace_asm_arg(char **dst_p, struct asm_arg *arg) 1170 { 1171 char *dst = *dst_p; 1172 int len = strlen(arg->value); 1173 1174 memcpy(dst, arg->value, len); 1175 *dst_p = dst + len; 1176 } 1177 1178 static void replace_asm_percent(const char **src_p, char **dst_p, struct asm_arg *args, int nr) 1179 { 1180 const char *src = *src_p; 1181 char c; 1182 int index; 1183 1184 c = *src++; 1185 switch (c) { 1186 case '0' ... 
		index = c - '0';
		if (index < nr)
			replace_asm_arg(dst_p, args+index);
		break;
	}
	*src_p = src;
	return;
}

static void replace_asm_named(const char **src_p, char **dst_p, struct asm_arg *args, int nr)
{
	const char *src = *src_p;
	const char *end = src;

	for(;;) {
		char c = *end++;
		if (!c)
			return;
		if (c == ']') {
			int i;

			*src_p = end;
			for (i = 0; i < nr; i++) {
				const struct ident *ident = args[i].name;
				int len;
				if (!ident)
					continue;
				len = ident->len;
				if (memcmp(src, ident->name, len))
					continue;
				replace_asm_arg(dst_p, args+i);
				return;
			}
		}
	}
}

static const char *replace_asm_args(const char *str, struct asm_arg *args, int nr)
{
	static char buffer[1000];
	char *p = buffer;

	for (;;) {
		char c = *str;
		*p = c;
		if (!c)
			return buffer;
		str++;
		switch (c) {
		case '%':
			if (*str == '%') {
				str++;
				p++;
				continue;
			}
			replace_asm_percent(&str, &p, args, nr);
			continue;
		case '[':
			replace_asm_named(&str, &p, args, nr);
			continue;
		default:
			break;
		}
		p++;
	}
}

#define MAX_ASM_ARG (50)
static struct asm_arg asm_arguments[MAX_ASM_ARG];

static struct asm_arg *generate_asm_inputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
{
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg, *orig;
		const char *string;
		int index;

		string = "undef";
		switch (*constraint) {
		case 'r':
			string = getreg(state, pseudo, NULL)->name;
			break;
		case '0' ... '9':
			index = *constraint - '0';
			reg = asm_arguments[index].reg;
			orig = find_in_reg(state, pseudo);
			if (orig)
				move_reg(state, orig, reg);
			else
				fill_reg(state, reg, pseudo);
			string = reg->name;
			break;
		default:
			string = generic(state, pseudo);
			break;
		}

		output_insn(state, "# asm input \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
		arg->pseudo = NULL;
		arg->reg = NULL;
		arg++;
	} END_FOR_EACH_PTR(entry);
	return arg;
}

static struct asm_arg *generate_asm_outputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
{
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg;
		const char *string;

		while (*constraint == '=' || *constraint == '+')
			constraint++;

		string = "undef";
		switch (*constraint) {
		case 'r':
		default:
			reg = target_reg(state, pseudo, NULL);
			arg->pseudo = pseudo;
			arg->reg = reg;
			string = reg->name;
			break;
		}

		output_insn(state, "# asm output \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
		arg++;
	} END_FOR_EACH_PTR(entry);
	return arg;
}

static void generate_asm(struct bb_state *state, struct instruction *insn)
{
	const char *str = insn->string;

	if (insn->asm_rules->outputs || insn->asm_rules->inputs) {
		struct asm_arg *arg;

		arg = generate_asm_outputs(state, insn->asm_rules->outputs, asm_arguments);
		arg = generate_asm_inputs(state, insn->asm_rules->inputs, arg);
		str = replace_asm_args(str, asm_arguments, arg - asm_arguments);
	}
	output_insn(state, "%s", str);
}

static void generate_compare(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *src;
	const char *src2;
	int opcode;

	flush_cc_cache(state);
	opcode = insn->opcode;

	/*
	 * We should try to switch these around if necessary,
	 * and update the opcode to match..
	 */
	src = getreg(state, insn->src1, insn->target);
	src2 = generic(state, insn->src2);

	output_insn(state, "cmp.%d %s,%s", insn->size, src2, src->name);

	add_cc_cache(state, opcode, insn->target);
}

static void generate_one_insn(struct instruction *insn, struct bb_state *state)
{
	if (verbose)
		output_comment(state, "%s", show_instruction(insn));

	switch (insn->opcode) {
	case OP_ENTRY: {
		struct symbol *sym = insn->bb->ep->name;
		const char *name = show_ident(sym->ident);
		if (sym->ctype.modifiers & MOD_STATIC)
			printf("\n\n%s:\n", name);
		else
			printf("\n\n.globl %s\n%s:\n", name, name);
		break;
	}

	/*
	 * OP_SETVAL likewise doesn't actually generate any
	 * code. On use, the "def" of the pseudo will be
	 * looked up.
	 */
1387 */ 1388 case OP_SETVAL: 1389 break; 1390 1391 case OP_STORE: 1392 generate_store(insn, state); 1393 break; 1394 1395 case OP_LOAD: 1396 generate_load(insn, state); 1397 break; 1398 1399 case OP_DEATHNOTE: 1400 mark_pseudo_dead(state, insn->target); 1401 return; 1402 1403 case OP_COPY: 1404 generate_copy(state, insn); 1405 break; 1406 1407 case OP_ADD: case OP_MULU: case OP_MULS: 1408 case OP_AND: case OP_OR: case OP_XOR: 1409 case OP_AND_BOOL: case OP_OR_BOOL: 1410 generate_commutative_binop(state, insn); 1411 break; 1412 1413 case OP_SUB: case OP_DIVU: case OP_DIVS: 1414 case OP_MODU: case OP_MODS: 1415 case OP_SHL: case OP_LSR: case OP_ASR: 1416 generate_binop(state, insn); 1417 break; 1418 1419 case OP_BINCMP ... OP_BINCMP_END: 1420 generate_compare(state, insn); 1421 break; 1422 1423 case OP_CAST: case OP_SCAST: case OP_FPCAST: case OP_PTRCAST: 1424 generate_cast(state, insn); 1425 break; 1426 1427 case OP_SEL: 1428 generate_select(state, insn); 1429 break; 1430 1431 case OP_BR: 1432 case OP_CBR: 1433 generate_branch(state, insn); 1434 break; 1435 1436 case OP_SWITCH: 1437 generate_switch(state, insn); 1438 break; 1439 1440 case OP_CALL: 1441 generate_call(state, insn); 1442 break; 1443 1444 case OP_RET: 1445 generate_ret(state, insn); 1446 break; 1447 1448 case OP_ASM: 1449 generate_asm(state, insn); 1450 break; 1451 1452 case OP_PHI: 1453 case OP_PHISOURCE: 1454 default: 1455 output_insn(state, "unimplemented: %s", show_instruction(insn)); 1456 break; 1457 } 1458 kill_dead_pseudos(state); 1459 } 1460 1461 #define VERY_BUSY 1000 1462 #define REG_FIXED 2000 1463 1464 static void write_reg_to_storage(struct bb_state *state, struct hardreg *reg, pseudo_t pseudo, struct storage *storage) 1465 { 1466 int i; 1467 struct hardreg *out; 1468 1469 switch (storage->type) { 1470 case REG_REG: 1471 out = hardregs + storage->regno; 1472 if (reg == out) 1473 return; 1474 output_insn(state, "movl %s,%s", reg->name, out->name); 1475 return; 1476 case REG_UDEF: 1477 if (reg->busy < VERY_BUSY) { 1478 storage->type = REG_REG; 1479 storage->regno = reg - hardregs; 1480 reg->busy = REG_FIXED; 1481 return; 1482 } 1483 1484 /* Try to find a non-busy register.. */ 1485 for (i = 0; i < REGNO; i++) { 1486 out = hardregs + i; 1487 if (out->contains) 1488 continue; 1489 output_insn(state, "movl %s,%s", reg->name, out->name); 1490 storage->type = REG_REG; 1491 storage->regno = i; 1492 out->busy = REG_FIXED; 1493 return; 1494 } 1495 1496 /* Fall back on stack allocation ... */ 1497 alloc_stack(state, storage); 1498 /* Fall through */ 1499 default: 1500 output_insn(state, "movl %s,%s", reg->name, show_memop(storage)); 1501 return; 1502 } 1503 } 1504 1505 static void write_val_to_storage(struct bb_state *state, pseudo_t src, struct storage *storage) 1506 { 1507 struct hardreg *out; 1508 1509 switch (storage->type) { 1510 case REG_UDEF: 1511 alloc_stack(state, storage); 1512 default: 1513 output_insn(state, "movl %s,%s", show_pseudo(src), show_memop(storage)); 1514 break; 1515 case REG_REG: 1516 out = hardregs + storage->regno; 1517 output_insn(state, "movl %s,%s", show_pseudo(src), out->name); 1518 } 1519 } 1520 1521 static void fill_output(struct bb_state *state, pseudo_t pseudo, struct storage *out) 1522 { 1523 int i; 1524 struct storage_hash *in; 1525 struct instruction *def; 1526 1527 /* Is that pseudo a constant value? 
	switch (pseudo->type) {
	case PSEUDO_VAL:
		write_val_to_storage(state, pseudo, out);
		return;
	case PSEUDO_REG:
		def = pseudo->def;
		if (def && def->opcode == OP_SETVAL) {
			write_val_to_storage(state, pseudo, out);
			return;
		}
	default:
		break;
	}

	/* See if we have that pseudo in a register.. */
	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		pseudo_t p;

		FOR_EACH_PTR(reg->contains, p) {
			if (p == pseudo) {
				write_reg_to_storage(state, reg, pseudo, out);
				return;
			}
		} END_FOR_EACH_PTR(p);
	}

	/* Do we have it in another storage? */
	in = find_storage_hash(pseudo, state->internal);
	if (!in) {
		in = find_storage_hash(pseudo, state->inputs);
		/* Undefined? */
		if (!in)
			return;
	}
	switch (out->type) {
	case REG_UDEF:
		*out = *in->storage;
		break;
	case REG_REG:
		output_insn(state, "movl %s,%s", show_memop(in->storage), hardregs[out->regno].name);
		break;
	default:
		if (out == in->storage)
			break;
		if ((out->type == in->storage->type) && (out->regno == in->storage->regno))
			break;
		output_insn(state, "movl %s,%s", show_memop(in->storage), show_memop(out));
		break;
	}
	return;
}

static int final_pseudo_flush(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	struct storage_hash *hash;
	struct storage *out;
	struct hardreg *dst;

	/*
	 * Since this pseudo is live at exit, we'd better have output
	 * storage for it..
	 */
	hash = find_storage_hash(pseudo, state->outputs);
	if (!hash)
		return 1;
	out = hash->storage;

	/* If the output is in a register, try to get it there.. */
	if (out->type == REG_REG) {
		dst = hardregs + out->regno;
		/*
		 * Two good cases: nobody is using the right register,
		 * or we've already set it aside for output..
		 */
		if (!dst->contains || dst->busy > VERY_BUSY)
			goto copy_to_dst;

		/* Aiee. Try to keep it in a register.. */
		dst = empty_reg(state);
		if (dst)
			goto copy_to_dst;

		return 0;
	}

	/* If the output is undefined, let's see if we can put it in a register.. */
	if (out->type == REG_UDEF) {
		dst = empty_reg(state);
		if (dst) {
			out->type = REG_REG;
			out->regno = dst - hardregs;
			goto copy_to_dst;
		}
		/* Uhhuh. Not so good. No empty registers right now */
		return 0;
	}

	/* If we know we need to flush it, just do so already .. */
	output_insn(state, "movl %s,%s", reg->name, show_memop(out));
	return 1;

copy_to_dst:
	if (reg == dst)
		return 1;
	output_insn(state, "movl %s,%s", reg->name, dst->name);
	add_pseudo_reg(state, pseudo, dst);
	return 1;
}

/*
 * This tries to make sure that we put all the pseudos that are
 * live on exit into the proper storage
 */
static void generate_output_storage(struct bb_state *state)
{
	struct storage_hash *entry;

	/* Go through the fixed outputs, making sure we have those regs free */
	FOR_EACH_PTR(state->outputs, entry) {
		struct storage *out = entry->storage;
		if (out->type == REG_REG) {
			struct hardreg *reg = hardregs + out->regno;
			pseudo_t p;
			int flushme = 0;

			reg->busy = REG_FIXED;
			FOR_EACH_PTR(reg->contains, p) {
				if (p == entry->pseudo) {
					flushme = -100;
					continue;
				}
				if (CURRENT_TAG(p) & TAG_DEAD)
					continue;

				/* Try to write back the pseudo to where it should go ... */
				if (final_pseudo_flush(state, p, reg)) {
					DELETE_CURRENT_PTR(p);
					continue;
				}
				flushme++;
			} END_FOR_EACH_PTR(p);
			PACK_PTR_LIST(&reg->contains);
			if (flushme > 0)
				flush_reg(state, reg);
		}
	} END_FOR_EACH_PTR(entry);

	FOR_EACH_PTR(state->outputs, entry) {
		fill_output(state, entry->pseudo, entry->storage);
	} END_FOR_EACH_PTR(entry);
}

static void generate(struct basic_block *bb, struct bb_state *state)
{
	int i;
	struct storage_hash *entry;
	struct instruction *insn;

	for (i = 0; i < REGNO; i++) {
		free_ptr_list(&hardregs[i].contains);
		hardregs[i].busy = 0;
		hardregs[i].dead = 0;
		hardregs[i].used = 0;
	}

	FOR_EACH_PTR(state->inputs, entry) {
		struct storage *storage = entry->storage;
		const char *name = show_storage(storage);
		output_comment(state, "incoming %s in %s", show_pseudo(entry->pseudo), name);
		if (storage->type == REG_REG) {
			int regno = storage->regno;
			add_pseudo_reg(state, entry->pseudo, hardregs + regno);
			name = hardregs[regno].name;
		}
	} END_FOR_EACH_PTR(entry);

	output_label(state, ".L%p", bb);
	FOR_EACH_PTR(bb->insns, insn) {
		if (!insn->bb)
			continue;
		generate_one_insn(insn, state);
	} END_FOR_EACH_PTR(insn);

	if (verbose) {
		output_comment(state, "--- in ---");
		FOR_EACH_PTR(state->inputs, entry) {
			output_comment(state, "%s <- %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
		} END_FOR_EACH_PTR(entry);
		output_comment(state, "--- spill ---");
		FOR_EACH_PTR(state->internal, entry) {
			output_comment(state, "%s <-> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
		} END_FOR_EACH_PTR(entry);
		output_comment(state, "--- out ---");
		FOR_EACH_PTR(state->outputs, entry) {
			output_comment(state, "%s -> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
		} END_FOR_EACH_PTR(entry);
	}
	printf("\n");
}

static void generate_list(struct basic_block_list *list, unsigned long generation)
{
	struct basic_block *bb;
	FOR_EACH_PTR(list, bb) {
		if (bb->generation == generation)
			continue;
		output_bb(bb, generation);
	} END_FOR_EACH_PTR(bb);
}

/*
 * Mark all the output registers of all the parents
 * as being "used" - this does not mean that we cannot
 * re-use them, but it means that we cannot ask the
 * parents to pass in another pseudo in one of those
 * registers that it already uses for another child.
 */
static void mark_used_registers(struct basic_block *bb, struct bb_state *state)
{
	struct basic_block *parent;

	FOR_EACH_PTR(bb->parents, parent) {
		struct storage_hash_list *outputs = gather_storage(parent, STOR_OUT);
		struct storage_hash *entry;

		FOR_EACH_PTR(outputs, entry) {
			struct storage *s = entry->storage;
			if (s->type == REG_REG) {
				struct hardreg *reg = hardregs + s->regno;
				reg->used = 1;
			}
		} END_FOR_EACH_PTR(entry);
	} END_FOR_EACH_PTR(parent);
}

static void output_bb(struct basic_block *bb, unsigned long generation)
{
	struct bb_state state;

	bb->generation = generation;

	/* Make sure all parents have been generated first */
	generate_list(bb->parents, generation);

	state.pos = bb->pos;
	state.inputs = gather_storage(bb, STOR_IN);
	state.outputs = gather_storage(bb, STOR_OUT);
	state.internal = NULL;
	state.cc_opcode = 0;
	state.cc_target = NULL;

	/* Mark incoming registers used */
	mark_used_registers(bb, &state);

	generate(bb, &state);

	free_ptr_list(&state.inputs);
	free_ptr_list(&state.outputs);

	/* Generate all children... */
	generate_list(bb->children, generation);
}

/*
 * We should set up argument sources here..
 *
 * Things like "first three arguments in registers" etc
 * are all for this place.
 *
 * On x86, we default to stack, unless it's a static
 * function that doesn't have its address taken.
 *
 * I should implement the -mregparm=X cmd line option.
 */
static void set_up_arch_entry(struct entrypoint *ep, struct instruction *entry)
{
	pseudo_t arg;
	struct symbol *sym, *argtype;
	int i, offset, regparm;

	sym = ep->name;
	regparm = 0;
	if (!(sym->ctype.modifiers & MOD_ADDRESSABLE))
		regparm = 3;
	sym = sym->ctype.base_type;
	i = 0;
	offset = 0;
	PREPARE_PTR_LIST(sym->arguments, argtype);
	FOR_EACH_PTR(entry->arg_list, arg) {
		struct storage *in = lookup_storage(entry->bb, arg, STOR_IN);
		if (!in) {
			in = alloc_storage();
			add_storage(in, entry->bb, arg, STOR_IN);
		}
		if (i < regparm) {
			in->type = REG_REG;
			in->regno = i;
		} else {
			int bits = argtype ? argtype->bit_size : 0;

			if (bits < bits_in_int)
				bits = bits_in_int;

			in->type = REG_FRAME;
			in->offset = offset;

			offset += bits_to_bytes(bits);
		}
		i++;
		NEXT_PTR_LIST(argtype);
	} END_FOR_EACH_PTR(arg);
	FINISH_PTR_LIST(argtype);
}

/*
 * Set up storage information for "return"
 *
 * Not strictly necessary, since the code generator will
 * certainly move the return value to the right register,
 * but it can help register allocation if the allocator
 * sees that the return value is going to end up in %eax.
 */
1850 */ 1851 static void set_up_arch_exit(struct basic_block *bb, struct instruction *ret) 1852 { 1853 pseudo_t pseudo = ret->src; 1854 1855 if (pseudo && pseudo != VOID) { 1856 struct storage *out = lookup_storage(bb, pseudo, STOR_OUT); 1857 if (!out) { 1858 out = alloc_storage(); 1859 add_storage(out, bb, pseudo, STOR_OUT); 1860 } 1861 out->type = REG_REG; 1862 out->regno = 0; 1863 } 1864 } 1865 1866 /* 1867 * Set up dummy/silly output storage information for a switch 1868 * instruction. We need to make sure that a register is available 1869 * when we generate code for switch, so force that by creating 1870 * a dummy output rule. 1871 */ 1872 static void set_up_arch_switch(struct basic_block *bb, struct instruction *insn) 1873 { 1874 pseudo_t pseudo = insn->cond; 1875 struct storage *out = lookup_storage(bb, pseudo, STOR_OUT); 1876 if (!out) { 1877 out = alloc_storage(); 1878 add_storage(out, bb, pseudo, STOR_OUT); 1879 } 1880 out->type = REG_REG; 1881 out->regno = SWITCH_REG; 1882 } 1883 1884 static void arch_set_up_storage(struct entrypoint *ep) 1885 { 1886 struct basic_block *bb; 1887 1888 /* Argument storage etc.. */ 1889 set_up_arch_entry(ep, ep->entry); 1890 1891 FOR_EACH_PTR(ep->bbs, bb) { 1892 struct instruction *insn = last_instruction(bb->insns); 1893 if (!insn) 1894 continue; 1895 switch (insn->opcode) { 1896 case OP_RET: 1897 set_up_arch_exit(bb, insn); 1898 break; 1899 case OP_SWITCH: 1900 set_up_arch_switch(bb, insn); 1901 break; 1902 default: 1903 /* nothing */; 1904 } 1905 } END_FOR_EACH_PTR(bb); 1906 } 1907 1908 static void output(struct entrypoint *ep) 1909 { 1910 unsigned long generation = ++bb_generation; 1911 1912 last_reg = -1; 1913 stack_offset = 0; 1914 1915 /* Get rid of SSA form (phinodes etc) */ 1916 unssa(ep); 1917 1918 /* Set up initial inter-bb storage links */ 1919 set_up_storage(ep); 1920 1921 /* Architecture-specific storage rules.. */ 1922 arch_set_up_storage(ep); 1923 1924 /* Show the results ... */ 1925 output_bb(ep->entry->bb, generation); 1926 1927 /* Clear the storage hashes for the next function.. */ 1928 free_storage(); 1929 } 1930 1931 static int compile(struct symbol_list *list) 1932 { 1933 struct symbol *sym; 1934 FOR_EACH_PTR(list, sym) { 1935 struct entrypoint *ep; 1936 expand_symbol(sym); 1937 ep = linearize_symbol(sym); 1938 if (ep) 1939 output(ep); 1940 } END_FOR_EACH_PTR(sym); 1941 1942 return 0; 1943 } 1944 1945 int main(int argc, char **argv) 1946 { 1947 struct string_list *filelist = NULL; 1948 char *file; 1949 1950 compile(sparse_initialize(argc, argv, &filelist)); 1951 dbg_dead = 1; 1952 FOR_EACH_PTR_NOTAG(filelist, file) { 1953 compile(sparse(file)); 1954 } END_FOR_EACH_PTR_NOTAG(file); 1955 return 0; 1956 } 1957