#include "all.h"
/* Memory optimization:
 *
 * - replace alloced slots used only in
 *   load/store operations
 *
 * Assumption: all the accesses have the
 * same size (this could be wrong...)
 */
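/* Sketch of the transformation on QBE IL (illustrative example, not
 * taken from an actual test case): a slot such as
 *
 *     %s =l alloc4 4
 *     storew %x, %s
 *     %y =w loadw %s
 *
 * is only accessed through word-sized loads and stores, so the alloc
 * can be dropped and the accesses turned into copies of an ordinary
 * temporary:
 *
 *     %s =w copy %x
 *     %y =w copy %s
 *
 * The remaining copies are cheap for later passes to eliminate.
 */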
/* require use, maintains use counts */
void
memopt(Fn *fn)
{
	Blk *b;
	Ins *i, *l;
	Tmp *t;
	Use *u, *ue;
	int a;

	b = fn->start;
	for (i=b->ins; i-b->ins < b->nins; i++) {
		if (OAlloc > i->op || i->op > OAlloc1)
			continue;
		/* specific to NAlign == 3 */
		assert(rtype(i->to) == RTmp);
		t = &fn->tmp[i->to.val];
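		/* the slot can be promoted only if every use is an
		 * instruction that loads from it or stores a value
		 * into it; a non-instruction use (phi, jump) or a
		 * store of the slot address itself means the address
		 * escapes and the alloc must be kept */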
		for (u=t->use; u != &t->use[t->nuse]; u++) {
			if (u->type != UIns)
				goto NextIns;
			l = u->u.ins;
			if (!isload(l->op)
			&& (!isstore(l->op) || req(i->to, l->arg[0])))
				goto NextIns;
		}
		/* get rid of the alloc and replace uses */
		*i = (Ins){.op = ONop};
		t->ndef--;
		ue = &t->use[t->nuse];
		for (u=t->use; u!=ue; u++) {
			l = u->u.ins;
			if (isstore(l->op)) {
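				/* rewrite the store as a copy that defines
				 * the slot temporary, picking the copy
				 * class from the width of the store */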
				if (l->op == OStores)
					l->cls = Ks;
				else if (l->op == OStored)
					l->cls = Kd;
				else if (l->op == OStorel)
					l->cls = Kl;
				else
					l->cls = Kw;
				l->op = OCopy;
				l->to = l->arg[1];
				l->arg[1] = R;
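				/* the instruction now defines the slot
				 * temporary instead of using it as an
				 * address, so shift one count from use
				 * to def */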
				t->nuse--;
				t->ndef++;
			} else
				/* try to turn loads into copies so we
				 * can eliminate them later */
				switch(l->op) {
				case OLoad:
					l->op = OCopy;
					break;
				case OLoadsw:
				case OLoaduw:
					l->cls = Kw;
					l->op = OCopy;
					break;
				default:
					/* keep l->cls */
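					/* assumes the OLoadsb..OLoaduh and
					 * OExtsb..OExtuh opcodes are declared
					 * in the same order, so the offset
					 * selects the matching sign/zero
					 * extension */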
					a = l->op - OLoadsb;
					l->op = OExtsb + a;
					break;
				}
		}
	NextIns:;
	}
	if (debug['M']) {
		fprintf(stderr, "\n> After memory optimization:\n");
		printfn(fn, stderr);
	}
}