reg,pass: refactor allocation of aliased registers (#121)

Issue #100 demonstrated that register allocation for aliased registers is
fundamentally broken. The root of the issue is that, currently, accesses to the
same virtual register with different masks are treated as distinct registers.
This PR takes a different approach:

* Liveness analysis is masked: we now properly consider which parts of a register are live
* Register allocation produces a mapping from virtual to physical ID, and aliasing is applied later

In addition, a new pass ZeroExtend32BitOutputs accounts for the fact that 32-bit writes in 64-bit mode should actually be treated as 64-bit writes (the result is zero-extended).

Closes #100
This commit is contained in:
Michael McLoughlin
2020-01-22 22:50:40 -08:00
committed by GitHub
parent 126469f13d
commit f40d602170
33 changed files with 1241 additions and 362 deletions

View File

@@ -70,7 +70,13 @@ func Constraint(t buildtags.ConstraintConvertable) { ctx.Constraint(t) }
// constraint comments. // constraint comments.
func ConstraintExpr(expr string) { ctx.ConstraintExpr(expr) } func ConstraintExpr(expr string) { ctx.ConstraintExpr(expr) }
// GP8 allocates and returns a general-purpose 8-bit register. // GP8L allocates and returns a general-purpose 8-bit register (low byte).
func GP8L() reg.GPVirtual { return ctx.GP8L() }
// GP8H allocates and returns a general-purpose 8-bit register (high byte).
func GP8H() reg.GPVirtual { return ctx.GP8H() }
// GP8 allocates and returns a general-purpose 8-bit register (low byte).
func GP8() reg.GPVirtual { return ctx.GP8() } func GP8() reg.GPVirtual { return ctx.GP8() }
// GP16 allocates and returns a general-purpose 16-bit register. // GP16 allocates and returns a general-purpose 16-bit register.

View File

@@ -55,8 +55,8 @@ type Instruction struct {
Succ []*Instruction Succ []*Instruction
// LiveIn/LiveOut are sets of live register IDs pre/post execution. // LiveIn/LiveOut are sets of live register IDs pre/post execution.
LiveIn reg.Set LiveIn reg.MaskSet
LiveOut reg.Set LiveOut reg.MaskSet
} }
func (i *Instruction) node() {} func (i *Instruction) node() {}

View File

@@ -141,10 +141,10 @@ func Registers(op Op) []reg.Register {
func ApplyAllocation(op Op, a reg.Allocation) Op { func ApplyAllocation(op Op, a reg.Allocation) Op {
switch op := op.(type) { switch op := op.(type) {
case reg.Register: case reg.Register:
return a.LookupDefault(op) return a.LookupRegisterDefault(op)
case Mem: case Mem:
op.Base = a.LookupDefault(op.Base) op.Base = a.LookupRegisterDefault(op.Base)
op.Index = a.LookupDefault(op.Index) op.Index = a.LookupRegisterDefault(op.Index)
return op return op
} }
return op return op

View File

@@ -3,6 +3,7 @@ package pass
import ( import (
"errors" "errors"
"math" "math"
"sort"
"github.com/mmcloughlin/avo/reg" "github.com/mmcloughlin/avo/reg"
) )
@@ -10,28 +11,43 @@ import (
// edge is an edge of the interference graph, indicating that registers X and Y // edge is an edge of the interference graph, indicating that registers X and Y
// must be in non-conflicting registers. // must be in non-conflicting registers.
type edge struct { type edge struct {
X, Y reg.Register X, Y reg.ID
} }
// Allocator is a graph-coloring register allocator. // Allocator is a graph-coloring register allocator.
type Allocator struct { type Allocator struct {
registers []reg.Physical registers []reg.ID
allocation reg.Allocation allocation reg.Allocation
edges []*edge edges []*edge
possible map[reg.Virtual][]reg.Physical possible map[reg.ID][]reg.ID
vidtopid map[reg.VID]reg.PID
} }
// NewAllocator builds an allocator for the given physical registers. // NewAllocator builds an allocator for the given physical registers.
func NewAllocator(rs []reg.Physical) (*Allocator, error) { func NewAllocator(rs []reg.Physical) (*Allocator, error) {
if len(rs) == 0 { // Set of IDs, excluding restricted registers.
return nil, errors.New("no registers") idset := map[reg.ID]bool{}
for _, r := range rs {
if (r.Info() & reg.Restricted) != 0 {
continue
}
idset[r.ID()] = true
} }
if len(idset) == 0 {
return nil, errors.New("no allocatable registers")
}
// Produce slice of unique register IDs.
var ids []reg.ID
for id := range idset {
ids = append(ids, id)
}
sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })
return &Allocator{ return &Allocator{
registers: rs, registers: ids,
allocation: reg.NewEmptyAllocation(), allocation: reg.NewEmptyAllocation(),
possible: map[reg.Virtual][]reg.Physical{}, possible: map[reg.ID][]reg.ID{},
vidtopid: map[reg.VID]reg.PID{},
}, nil }, nil
} }
@@ -45,23 +61,24 @@ func NewAllocatorForKind(k reg.Kind) (*Allocator, error) {
} }
// AddInterferenceSet records that r interferes with every register in s. Convenience wrapper around AddInterference. // AddInterferenceSet records that r interferes with every register in s. Convenience wrapper around AddInterference.
func (a *Allocator) AddInterferenceSet(r reg.Register, s reg.Set) { func (a *Allocator) AddInterferenceSet(r reg.Register, s reg.MaskSet) {
for y := range s { for id, mask := range s {
a.AddInterference(r, y) if (r.Mask() & mask) != 0 {
a.AddInterference(r.ID(), id)
}
} }
} }
// AddInterference records that x and y must be assigned to non-conflicting physical registers. // AddInterference records that x and y must be assigned to non-conflicting physical registers.
func (a *Allocator) AddInterference(x, y reg.Register) { func (a *Allocator) AddInterference(x, y reg.ID) {
a.Add(x) a.Add(x)
a.Add(y) a.Add(y)
a.edges = append(a.edges, &edge{X: x, Y: y}) a.edges = append(a.edges, &edge{X: x, Y: y})
} }
// Add adds a register to be allocated. Does nothing if the register has already been added. // Add adds a register to be allocated. Does nothing if the register has already been added.
func (a *Allocator) Add(r reg.Register) { func (a *Allocator) Add(v reg.ID) {
v, ok := r.(reg.Virtual) if !v.IsVirtual() {
if !ok {
return return
} }
if _, found := a.possible[v]; found { if _, found := a.possible[v]; found {
@@ -91,35 +108,22 @@ func (a *Allocator) Allocate() (reg.Allocation, error) {
// update possible allocations based on edges. // update possible allocations based on edges.
func (a *Allocator) update() error { func (a *Allocator) update() error {
for v := range a.possible {
pid, found := a.vidtopid[v.VirtualID()]
if !found {
continue
}
a.possible[v] = filterregisters(a.possible[v], func(r reg.Physical) bool {
return r.PhysicalID() == pid
})
}
var rem []*edge var rem []*edge
for _, e := range a.edges { for _, e := range a.edges {
e.X, e.Y = a.allocation.LookupDefault(e.X), a.allocation.LookupDefault(e.Y) x := a.allocation.LookupDefault(e.X)
y := a.allocation.LookupDefault(e.Y)
px, py := reg.ToPhysical(e.X), reg.ToPhysical(e.Y)
vx, vy := reg.ToVirtual(e.X), reg.ToVirtual(e.Y)
switch { switch {
case vx != nil && vy != nil: case x.IsVirtual() && y.IsVirtual():
rem = append(rem, e) rem = append(rem, e)
continue continue
case px != nil && py != nil: case x.IsPhysical() && y.IsPhysical():
if reg.AreConflicting(px, py) { if x == y {
return errors.New("impossible register allocation") return errors.New("impossible register allocation")
} }
case px != nil && vy != nil: case x.IsPhysical() && y.IsVirtual():
a.discardconflicting(vy, px) a.discardconflicting(y, x)
case vx != nil && py != nil: case x.IsVirtual() && y.IsPhysical():
a.discardconflicting(vx, py) a.discardconflicting(x, y)
default: default:
panic("unreachable") panic("unreachable")
} }
@@ -130,30 +134,29 @@ func (a *Allocator) update() error {
} }
// mostrestricted returns the virtual register with the least possibilities. // mostrestricted returns the virtual register with the least possibilities.
func (a *Allocator) mostrestricted() reg.Virtual { func (a *Allocator) mostrestricted() reg.ID {
n := int(math.MaxInt32) n := int(math.MaxInt32)
var v reg.Virtual var v reg.ID
for r, p := range a.possible { for w, p := range a.possible {
if len(p) < n || (len(p) == n && v != nil && r.VirtualID() < v.VirtualID()) { // On a tie, choose the smallest ID in numeric order. This avoids
// non-deterministic allocations due to map iteration order.
if len(p) < n || (len(p) == n && w < v) {
n = len(p) n = len(p)
v = r v = w
} }
} }
return v return v
} }
// discardconflicting removes registers from vs possible list that conflict with p. // discardconflicting removes registers from vs possible list that conflict with p.
func (a *Allocator) discardconflicting(v reg.Virtual, p reg.Physical) { func (a *Allocator) discardconflicting(v, p reg.ID) {
a.possible[v] = filterregisters(a.possible[v], func(r reg.Physical) bool { a.possible[v] = filterregisters(a.possible[v], func(r reg.ID) bool {
if pid, found := a.vidtopid[v.VirtualID()]; found && pid == p.PhysicalID() { return r != p
return true
}
return !reg.AreConflicting(r, p)
}) })
} }
// alloc attempts to allocate a register to v. // alloc attempts to allocate a register to v.
func (a *Allocator) alloc(v reg.Virtual) error { func (a *Allocator) alloc(v reg.ID) error {
ps := a.possible[v] ps := a.possible[v]
if len(ps) == 0 { if len(ps) == 0 {
return errors.New("failed to allocate registers") return errors.New("failed to allocate registers")
@@ -161,7 +164,6 @@ func (a *Allocator) alloc(v reg.Virtual) error {
p := ps[0] p := ps[0]
a.allocation[v] = p a.allocation[v] = p
delete(a.possible, v) delete(a.possible, v)
a.vidtopid[v.VirtualID()] = p.PhysicalID()
return nil return nil
} }
@@ -171,14 +173,14 @@ func (a *Allocator) remaining() int {
} }
// possibleregisters returns all allocate-able registers for the given virtual. // possibleregisters returns all allocate-able registers for the given virtual.
func (a *Allocator) possibleregisters(v reg.Virtual) []reg.Physical { func (a *Allocator) possibleregisters(v reg.ID) []reg.ID {
return filterregisters(a.registers, func(r reg.Physical) bool { return filterregisters(a.registers, func(r reg.ID) bool {
return v.SatisfiedBy(r) && (r.Info()&reg.Restricted) == 0 return v.Kind() == r.Kind()
}) })
} }
func filterregisters(in []reg.Physical, predicate func(reg.Physical) bool) []reg.Physical { func filterregisters(in []reg.ID, predicate func(reg.ID) bool) []reg.ID {
var rs []reg.Physical var rs []reg.ID
for _, r := range in { for _, r := range in {
if predicate(r) { if predicate(r) {
rs = append(rs, r) rs = append(rs, r)

View File

@@ -15,9 +15,9 @@ func TestAllocatorSimple(t *testing.T) {
t.Fatal(err) t.Fatal(err)
} }
a.Add(x) a.Add(x.ID())
a.Add(y) a.Add(y.ID())
a.AddInterference(x, y) a.AddInterference(x.ID(), y.ID())
alloc, err := a.Allocate() alloc, err := a.Allocate()
if err != nil { if err != nil {
@@ -26,7 +26,7 @@ func TestAllocatorSimple(t *testing.T) {
t.Log(alloc) t.Log(alloc)
if alloc[x] != reg.X0 || alloc[y] != reg.Y1 { if alloc.LookupRegister(x) != reg.X0 || alloc.LookupRegister(y) != reg.Y1 {
t.Fatalf("unexpected allocation") t.Fatalf("unexpected allocation")
} }
} }
@@ -37,7 +37,7 @@ func TestAllocatorImpossible(t *testing.T) {
t.Fatal(err) t.Fatal(err)
} }
a.AddInterference(reg.X7, reg.Z7) a.AddInterference(reg.X7.ID(), reg.Z7.ID())
_, err = a.Allocate() _, err = a.Allocate()
if err == nil { if err == nil {

View File

@@ -16,6 +16,7 @@ var Compile = Concat(
FunctionPass(PruneDanglingLabels), FunctionPass(PruneDanglingLabels),
FunctionPass(LabelTarget), FunctionPass(LabelTarget),
FunctionPass(CFG), FunctionPass(CFG),
InstructionPass(ZeroExtend32BitOutputs),
FunctionPass(Liveness), FunctionPass(Liveness),
FunctionPass(AllocateRegisters), FunctionPass(AllocateRegisters),
FunctionPass(BindRegisters), FunctionPass(BindRegisters),

View File

@@ -8,6 +8,24 @@ import (
"github.com/mmcloughlin/avo/reg" "github.com/mmcloughlin/avo/reg"
) )
// ZeroExtend32BitOutputs applies the rule that "32-bit operands generate a
// 32-bit result, zero-extended to a 64-bit result in the destination
// general-purpose register" (Intel Software Developers Manual, Volume 1,
// 3.4.1.1).
func ZeroExtend32BitOutputs(i *ir.Instruction) error {
for j, op := range i.Outputs {
if !operand.IsR32(op) {
continue
}
r, ok := op.(reg.GP)
if !ok {
panic("r32 operand should satisfy reg.GP")
}
i.Outputs[j] = r.As64()
}
return nil
}
// Liveness computes register liveness. // Liveness computes register liveness.
func Liveness(fn *ir.Function) error { func Liveness(fn *ir.Function) error {
// Note this implementation is initially naive so as to be "obviously correct". // Note this implementation is initially naive so as to be "obviously correct".
@@ -23,8 +41,8 @@ func Liveness(fn *ir.Function) error {
// Initialize. // Initialize.
for _, i := range is { for _, i := range is {
i.LiveIn = reg.NewSetFromSlice(i.InputRegisters()) i.LiveIn = reg.NewMaskSetFromRegisters(i.InputRegisters())
i.LiveOut = reg.NewEmptySet() i.LiveOut = reg.NewEmptyMaskSet()
} }
// Iterative dataflow analysis. // Iterative dataflow analysis.
@@ -33,29 +51,16 @@ func Liveness(fn *ir.Function) error {
for _, i := range is { for _, i := range is {
// out[n] = UNION[s IN succ[n]] in[s] // out[n] = UNION[s IN succ[n]] in[s]
nout := len(i.LiveOut)
for _, s := range i.Succ { for _, s := range i.Succ {
if s == nil { if s == nil {
continue continue
} }
i.LiveOut.Update(s.LiveIn) changes = i.LiveOut.Update(s.LiveIn) || changes
}
if len(i.LiveOut) != nout {
changes = true
} }
// in[n] = use[n] UNION (out[n] - def[n]) // in[n] = use[n] UNION (out[n] - def[n])
nin := len(i.LiveIn) def := reg.NewMaskSetFromRegisters(i.OutputRegisters())
def := reg.NewSetFromSlice(i.OutputRegisters()) changes = i.LiveIn.Update(i.LiveOut.Difference(def)) || changes
i.LiveIn.Update(i.LiveOut.Difference(def))
for r := range i.LiveOut {
if _, found := def[r]; !found {
i.LiveIn.Add(r)
}
}
if len(i.LiveIn) != nin {
changes = true
}
} }
if !changes { if !changes {
@@ -80,7 +85,7 @@ func AllocateRegisters(fn *ir.Function) error {
} }
as[k] = a as[k] = a
} }
as[k].Add(r) as[k].Add(r.ID())
} }
} }
@@ -89,7 +94,7 @@ func AllocateRegisters(fn *ir.Function) error {
for _, d := range i.OutputRegisters() { for _, d := range i.OutputRegisters() {
k := d.Kind() k := d.Kind()
out := i.LiveOut.OfKind(k) out := i.LiveOut.OfKind(k)
out.Discard(d) out.DiscardRegister(d)
as[k].AddInterferenceSet(d, out) as[k].AddInterferenceSet(d, out)
} }
} }

View File

@@ -3,15 +3,60 @@ package pass_test
import ( import (
"testing" "testing"
"github.com/mmcloughlin/avo/ir"
"github.com/mmcloughlin/avo/reg"
"github.com/mmcloughlin/avo/pass"
"github.com/mmcloughlin/avo/build" "github.com/mmcloughlin/avo/build"
"github.com/mmcloughlin/avo/ir"
"github.com/mmcloughlin/avo/operand" "github.com/mmcloughlin/avo/operand"
"github.com/mmcloughlin/avo/pass"
"github.com/mmcloughlin/avo/reg"
) )
func TestZeroExtend32BitOutputs(t *testing.T) {
collection := reg.NewCollection()
v16 := collection.GP16()
v32 := collection.GP32()
i := &ir.Instruction{
Outputs: []operand.Op{
reg.R8B,
reg.R9W,
reg.R10L,
reg.R11,
v16,
v32,
},
}
err := pass.ZeroExtend32BitOutputs(i)
if err != nil {
t.Fatal(err)
}
got := i.Outputs
expect := []reg.Register{
reg.R8B,
reg.R9W,
reg.R10, // converted from R10L
reg.R11,
v16,
v32.As64(), // converted from 32-bit
}
if len(expect) != len(got) {
t.Fatal("length mismatch")
}
for j := range got {
r, ok := got[j].(reg.Register)
if !ok {
t.Fatalf("expected register; got %s", got[j].Asm())
}
if !reg.Equal(expect[j], r) {
t.Fatalf("got %s; expect %s", expect[j].Asm(), r.Asm())
}
}
}
func TestLivenessBasic(t *testing.T) { func TestLivenessBasic(t *testing.T) {
// Build: a = 1, b = 2, a = a+b // Build: a = 1, b = 2, a = a+b
ctx := build.NewContext() ctx := build.NewContext()
@@ -50,8 +95,8 @@ func AssertLiveness(t *testing.T, ctx *build.Context, in, out [][]reg.Register)
} }
} }
func AssertRegistersMatchSet(t *testing.T, rs []reg.Register, s reg.Set) { func AssertRegistersMatchSet(t *testing.T, rs []reg.Register, s reg.MaskSet) {
if !s.Equals(reg.NewSetFromSlice(rs)) { if !s.Equals(reg.NewMaskSetFromRegisters(rs)) {
t.Fatalf("register slice does not match set: %#v and %#v", rs, s) t.Fatalf("register slice does not match set: %#v and %#v", rs, s)
} }
} }

View File

@@ -3,46 +3,52 @@ package reg
// Collection represents a collection of virtual registers. This is primarily // Collection represents a collection of virtual registers. This is primarily
// useful for allocating virtual registers with distinct IDs. // useful for allocating virtual registers with distinct IDs.
type Collection struct { type Collection struct {
vid map[Kind]VID idx map[Kind]Index
} }
// NewCollection builds an empty register collection. // NewCollection builds an empty register collection.
func NewCollection() *Collection { func NewCollection() *Collection {
return &Collection{ return &Collection{
vid: map[Kind]VID{}, idx: map[Kind]Index{},
} }
} }
// VirtualRegister allocates and returns a new virtual register of the given kind and width. // VirtualRegister allocates and returns a new virtual register of the given kind and width.
func (c *Collection) VirtualRegister(k Kind, w Width) Virtual { func (c *Collection) VirtualRegister(k Kind, s Spec) Virtual {
vid := c.vid[k] idx := c.idx[k]
c.vid[k]++ c.idx[k]++
return NewVirtual(vid, k, w) return NewVirtual(idx, k, s)
} }
// GP8 allocates and returns a general-purpose 8-bit register. // GP8L allocates and returns a general-purpose 8-bit register (low byte).
func (c *Collection) GP8() GPVirtual { return c.GP(B8) } func (c *Collection) GP8L() GPVirtual { return c.GP(S8L) }
// GP8H allocates and returns a general-purpose 8-bit register (high byte).
func (c *Collection) GP8H() GPVirtual { return c.GP(S8H) }
// GP8 allocates and returns a general-purpose 8-bit register (low byte).
func (c *Collection) GP8() GPVirtual { return c.GP8L() }
// GP16 allocates and returns a general-purpose 16-bit register. // GP16 allocates and returns a general-purpose 16-bit register.
func (c *Collection) GP16() GPVirtual { return c.GP(B16) } func (c *Collection) GP16() GPVirtual { return c.GP(S16) }
// GP32 allocates and returns a general-purpose 32-bit register. // GP32 allocates and returns a general-purpose 32-bit register.
func (c *Collection) GP32() GPVirtual { return c.GP(B32) } func (c *Collection) GP32() GPVirtual { return c.GP(S32) }
// GP64 allocates and returns a general-purpose 64-bit register. // GP64 allocates and returns a general-purpose 64-bit register.
func (c *Collection) GP64() GPVirtual { return c.GP(B64) } func (c *Collection) GP64() GPVirtual { return c.GP(S64) }
// GP allocates and returns a general-purpose register of the given width. // GP allocates and returns a general-purpose register of the given width.
func (c *Collection) GP(w Width) GPVirtual { return newgpv(c.VirtualRegister(KindGP, w)) } func (c *Collection) GP(s Spec) GPVirtual { return newgpv(c.VirtualRegister(KindGP, s)) }
// XMM allocates and returns a 128-bit vector register. // XMM allocates and returns a 128-bit vector register.
func (c *Collection) XMM() VecVirtual { return c.Vec(B128) } func (c *Collection) XMM() VecVirtual { return c.Vec(S128) }
// YMM allocates and returns a 256-bit vector register. // YMM allocates and returns a 256-bit vector register.
func (c *Collection) YMM() VecVirtual { return c.Vec(B256) } func (c *Collection) YMM() VecVirtual { return c.Vec(S256) }
// ZMM allocates and returns a 512-bit vector register. // ZMM allocates and returns a 512-bit vector register.
func (c *Collection) ZMM() VecVirtual { return c.Vec(B512) } func (c *Collection) ZMM() VecVirtual { return c.Vec(S512) }
// Vec allocates and returns a vector register of the given width. // Vec allocates and returns a vector register of the given width.
func (c *Collection) Vec(w Width) VecVirtual { return newvecv(c.VirtualRegister(KindVector, w)) } func (c *Collection) Vec(s Spec) VecVirtual { return newvecv(c.VirtualRegister(KindVector, s)) }

View File

@@ -1,6 +1,40 @@
package reg package reg
import "testing" import (
"testing"
"testing/quick"
)
func TestIDFields(t *testing.T) {
f := func(v uint8, kind Kind, idx Index) bool {
id := newid(v, kind, idx)
return id.Kind() == kind && id.Index() == idx
}
if err := quick.Check(f, nil); err != nil {
t.Fatal(err)
}
}
func TestIDIsVirtual(t *testing.T) {
cases := []Virtual{
GeneralPurpose.Virtual(42, S64),
Vector.Virtual(42, S128),
}
for _, r := range cases {
if !r.ID().IsVirtual() {
t.FailNow()
}
}
}
func TestIDIsPhysical(t *testing.T) {
cases := []Physical{AL, AH, AX, EAX, RAX, X1, Y2, Z31}
for _, r := range cases {
if !r.ID().IsPhysical() {
t.FailNow()
}
}
}
func TestSpecSize(t *testing.T) { func TestSpecSize(t *testing.T) {
cases := []struct { cases := []struct {
@@ -25,7 +59,7 @@ func TestSpecSize(t *testing.T) {
} }
func TestToVirtual(t *testing.T) { func TestToVirtual(t *testing.T) {
v := GeneralPurpose.Virtual(42, B32) v := GeneralPurpose.Virtual(42, S32)
if ToVirtual(v) != v { if ToVirtual(v) != v {
t.Errorf("ToVirtual(v) != v for virtual register") t.Errorf("ToVirtual(v) != v for virtual register")
} }
@@ -35,7 +69,7 @@ func TestToVirtual(t *testing.T) {
} }
func TestToPhysical(t *testing.T) { func TestToPhysical(t *testing.T) {
v := GeneralPurpose.Virtual(42, B32) v := GeneralPurpose.Virtual(42, S32)
if ToPhysical(v) != nil { if ToPhysical(v) != nil {
t.Errorf("ToPhysical should be nil for virtual registers") t.Errorf("ToPhysical should be nil for virtual registers")
} }
@@ -44,31 +78,10 @@ func TestToPhysical(t *testing.T) {
} }
} }
func TestAreConflicting(t *testing.T) {
cases := []struct {
X, Y Physical
Expect bool
}{
{ECX, X3, false},
{AL, AH, false},
{AL, AX, true},
{AL, BX, false},
{X3, Y4, false},
{X3, Y3, true},
{Y3, Z4, false},
{Y3, Z3, true},
}
for _, c := range cases {
if AreConflicting(c.X, c.Y) != c.Expect {
t.Errorf("AreConflicting(%s, %s) != %v", c.X, c.Y, c.Expect)
}
}
}
func TestFamilyLookup(t *testing.T) { func TestFamilyLookup(t *testing.T) {
cases := []struct { cases := []struct {
Family *Family Family *Family
ID PID ID Index
Spec Spec Spec Spec
Expect Physical Expect Physical
}{ }{
@@ -89,7 +102,7 @@ func TestFamilyLookup(t *testing.T) {
for _, c := range cases { for _, c := range cases {
got := c.Family.Lookup(c.ID, c.Spec) got := c.Family.Lookup(c.ID, c.Spec)
if got != c.Expect { if got != c.Expect {
t.Errorf("pid=%v spec=%v: lookup got %v expect %v", c.ID, c.Spec, got, c.Expect) t.Errorf("idx=%v spec=%v: lookup got %v expect %v", c.ID, c.Spec, got, c.Expect)
} }
} }
} }
@@ -117,21 +130,45 @@ func TestPhysicalAs(t *testing.T) {
} }
func TestVirtualAs(t *testing.T) { func TestVirtualAs(t *testing.T) {
cases := []struct { v := GeneralPurpose.Virtual(0, S64)
Virtual Register specs := []Spec{S8, S8L, S8H, S16, S32, S64}
Physical Physical for _, s := range specs {
Match bool if v.as(s).Mask() != s.Mask() {
}{ t.FailNow()
{GeneralPurpose.Virtual(0, B8), CL, true}, }
{GeneralPurpose.Virtual(0, B8), CH, true}, }
{GeneralPurpose.Virtual(0, B32).as(S8L), CL, true}, }
{GeneralPurpose.Virtual(0, B32).as(S8L), CH, false},
{GeneralPurpose.Virtual(0, B16).as(S32), R9L, true}, func TestLookupPhysical(t *testing.T) {
{GeneralPurpose.Virtual(0, B16).as(S32), R9, false}, cases := []struct {
} Kind Kind
for _, c := range cases { Index Index
if c.Virtual.(Virtual).SatisfiedBy(c.Physical) != c.Match { Spec Spec
t.Errorf("%s.SatisfiedBy(%v) != %v", c.Virtual.Asm(), c.Physical, c.Match) Expect Physical
}{
{KindGP, 0, S8L, AL},
{KindGP, 1, S8H, CH},
{KindGP, 7, S8, DIB},
{KindGP, 8, S16, R8W},
{KindGP, 9, S32, R9L},
{KindGP, 10, S64, R10},
{KindVector, 7, S128, X7},
{KindVector, 17, S256, Y17},
{KindVector, 27, S512, Z27},
}
for _, c := range cases {
if got := LookupPhysical(c.Kind, c.Index, c.Spec); !Equal(got, c.Expect) {
t.FailNow()
}
}
}
func TestLookupIDSelf(t *testing.T) {
cases := []Physical{AL, AH, AX, EAX, RAX, X1, Y2, Z31}
for _, r := range cases {
if got := LookupID(r.ID(), r.spec()); !Equal(got, r) {
t.FailNow()
} }
} }
} }

View File

@@ -1,69 +1,99 @@
package reg package reg
// Set is a set of registers. // MaskSet maps register IDs to masks.
type Set map[Register]bool type MaskSet map[ID]uint16
// NewEmptySet builds an empty register set. // NewEmptyMaskSet builds an empty register mask set.
func NewEmptySet() Set { func NewEmptyMaskSet() MaskSet {
return Set{} return MaskSet{}
} }
// NewSetFromSlice forms a set from the given register list. // NewMaskSetFromRegisters forms a mask set from the given register list.
func NewSetFromSlice(rs []Register) Set { func NewMaskSetFromRegisters(rs []Register) MaskSet {
s := NewEmptySet() s := NewEmptyMaskSet()
for _, r := range rs { for _, r := range rs {
s.Add(r) s.AddRegister(r)
} }
return s return s
} }
// Clone returns a copy of s. // Clone returns a copy of s.
func (s Set) Clone() Set { func (s MaskSet) Clone() MaskSet {
c := NewEmptySet() c := NewEmptyMaskSet()
for r := range s { for id, mask := range s {
c.Add(r) c.Add(id, mask)
} }
return c return c
} }
// Add r to s. // Add mask to the given register ID.
func (s Set) Add(r Register) { // Reports whether this made any change to the set.
s[r] = true func (s MaskSet) Add(id ID, mask uint16) bool {
} if (s[id] & mask) == mask {
return false
// Discard removes r from s, if present.
func (s Set) Discard(r Register) {
delete(s, r)
}
// Update adds every register in t to s.
func (s Set) Update(t Set) {
for r := range t {
s.Add(r)
} }
s[id] |= mask
return true
}
// AddRegister is a convenience for adding the register's (ID, mask) to the set.
// Reports whether this made any change to the set.
func (s MaskSet) AddRegister(r Register) bool {
return s.Add(r.ID(), r.Mask())
}
// Discard clears masked bits from register ID.
// Reports whether this made any change to the set.
func (s MaskSet) Discard(id ID, mask uint16) bool {
if curr, found := s[id]; !found || (curr&mask) == 0 {
return false
}
s[id] &^= mask
if s[id] == 0 {
delete(s, id)
}
return true
}
// DiscardRegister is a convenience for discarding the register's (ID, mask) from the set.
// Reports whether this made any change to the set.
func (s MaskSet) DiscardRegister(r Register) bool {
return s.Discard(r.ID(), r.Mask())
}
// Update adds masks in t to s.
// Reports whether this made any change to the set.
func (s MaskSet) Update(t MaskSet) bool {
change := false
for id, mask := range t {
change = s.Add(id, mask) || change
}
return change
} }
// Difference returns the set of registers in s but not t. // Difference returns the set of registers in s but not t.
func (s Set) Difference(t Set) Set { func (s MaskSet) Difference(t MaskSet) MaskSet {
d := s.Clone() d := s.Clone()
d.DifferenceUpdate(t) d.DifferenceUpdate(t)
return d return d
} }
// DifferenceUpdate removes every element of t from s. // DifferenceUpdate removes every element of t from s.
func (s Set) DifferenceUpdate(t Set) { func (s MaskSet) DifferenceUpdate(t MaskSet) bool {
for r := range t { change := false
s.Discard(r) for id, mask := range t {
change = s.Discard(id, mask) || change
} }
return change
} }
// Equals returns true if s and t contain the same registers. // Equals returns true if s and t contain the same masks.
func (s Set) Equals(t Set) bool { func (s MaskSet) Equals(t MaskSet) bool {
if len(s) != len(t) { if len(s) != len(t) {
return false return false
} }
for r := range s { for id, mask := range s {
if _, found := t[r]; !found { if _, found := t[id]; !found || mask != t[id] {
return false return false
} }
} }
@@ -71,11 +101,11 @@ func (s Set) Equals(t Set) bool {
} }
// OfKind returns the set of elements of s with kind k. // OfKind returns the set of elements of s with kind k.
func (s Set) OfKind(k Kind) Set { func (s MaskSet) OfKind(k Kind) MaskSet {
t := NewEmptySet() t := NewEmptyMaskSet()
for r := range s { for id, mask := range s {
if r.Kind() == k { if id.Kind() == k {
t.Add(r) t.Add(id, mask)
} }
} }
return t return t

View File

@@ -1,41 +0,0 @@
package reg
import "testing"
func TestSetRegisterIdentity(t *testing.T) {
rs := []Register{
NewVirtual(42, KindGP, B32),
NewVirtual(43, KindGP, B32),
NewVirtual(42, KindVector, B32),
NewVirtual(42, KindGP, B64),
AL, AH, CL,
AX, R13W,
EDX, R9L,
RCX, R14,
X1, X7,
Y4, Y9,
Z13, Z31,
}
s := NewEmptySet()
for _, r := range rs {
s.Add(r)
s.Add(r)
}
if len(s) != len(rs) {
t.Fatalf("expected set to have same size as slice: got %d expect %d", len(s), len(rs))
}
}
func TestSetFamilyRegisters(t *testing.T) {
fs := []*Family{GeneralPurpose, Vector}
s := NewEmptySet()
expect := 0
for _, f := range fs {
s.Update(f.Set())
s.Add(f.Virtual(42, B64))
expect += len(f.Registers()) + 1
}
if len(s) != expect {
t.Fatalf("set size mismatch: %d expected %d", len(s), expect)
}
}

View File

@@ -5,26 +5,12 @@ import (
"fmt" "fmt"
) )
// Width is a register width.
type Width uint
// Typical register width values.
const (
B8 Width = 1 << iota
B16
B32
B64
B128
B256
B512
)
// Size returns the register width in bytes.
func (w Width) Size() uint { return uint(w) }
// Kind is a class of registers. // Kind is a class of registers.
type Kind uint8 type Kind uint8
// Index of a register within a kind.
type Index uint16
// Family is a collection of Physical registers of a common kind. // Family is a collection of Physical registers of a common kind.
type Family struct { type Family struct {
Kind Kind Kind Kind
@@ -32,8 +18,8 @@ type Family struct {
} }
// define builds a register and adds it to the Family. // define builds a register and adds it to the Family.
func (f *Family) define(s Spec, id PID, name string, flags ...Info) Physical { func (f *Family) define(s Spec, idx Index, name string, flags ...Info) Physical {
r := newregister(f, s, id, name, flags...) r := newregister(f, s, idx, name, flags...)
f.add(r) f.add(r)
return r return r
} }
@@ -47,8 +33,8 @@ func (f *Family) add(r Physical) {
} }
// Virtual returns a virtual register from this family's kind. // Virtual returns a virtual register from this family's kind.
func (f *Family) Virtual(id VID, w Width) Virtual { func (f *Family) Virtual(idx Index, s Spec) Virtual {
return NewVirtual(id, f.Kind, w) return NewVirtual(idx, f.Kind, s)
} }
// Registers returns the registers in this family. // Registers returns the registers in this family.
@@ -56,41 +42,56 @@ func (f *Family) Registers() []Physical {
return append([]Physical(nil), f.registers...) return append([]Physical(nil), f.registers...)
} }
// Set returns the set of registers in the family. // Lookup returns the register with given physical index and spec. Returns nil if no such register exists.
func (f *Family) Set() Set { func (f *Family) Lookup(idx Index, s Spec) Physical {
s := NewEmptySet()
for _, r := range f.registers { for _, r := range f.registers {
s.Add(r) if r.PhysicalIndex() == idx && r.Mask() == s.Mask() {
}
return s
}
// Lookup returns the register with given physical ID and spec. Returns nil if no such register exists.
func (f *Family) Lookup(id PID, s Spec) Physical {
for _, r := range f.registers {
if r.PhysicalID() == id && r.Mask() == s.Mask() {
return r return r
} }
} }
return nil return nil
} }
// ID is a register identifier.
type ID uint32
// newid builds a new register ID from the virtual flag v, kind and index.
func newid(v uint8, kind Kind, idx Index) ID {
return ID(v) | (ID(kind) << 8) | (ID(idx) << 16)
}
// IsVirtual reports whether this is an ID for a virtual register.
func (id ID) IsVirtual() bool { return (id & 1) == 1 }
// IsPhysical reports whether this is an ID for a physical register.
func (id ID) IsPhysical() bool { return !id.IsVirtual() }
// Kind extracts the kind from the register ID.
func (id ID) Kind() Kind { return Kind(id >> 8) }
// Index extracts the index from the register ID.
func (id ID) Index() Index { return Index(id >> 16) }
// Register represents a virtual or physical register. // Register represents a virtual or physical register.
type Register interface { type Register interface {
ID() ID
Kind() Kind Kind() Kind
Size() uint Size() uint
Mask() uint16
Asm() string Asm() string
as(Spec) Register as(Spec) Register
spec() Spec
register() register()
} }
// VID is a virtual register ID. // Equal reports whether a and b are equal registers.
type VID uint16 func Equal(a, b Register) bool {
return (a.ID() == b.ID()) && (a.Mask() == b.Mask())
}
// Virtual is a register of a given type and size, not yet allocated to a physical register. // Virtual is a register of a given type and size, not yet allocated to a physical register.
type Virtual interface { type Virtual interface {
VirtualID() VID VirtualIndex() Index
SatisfiedBy(Physical) bool
Register Register
} }
@@ -103,43 +104,39 @@ func ToVirtual(r Register) Virtual {
} }
type virtual struct { type virtual struct {
id VID idx Index
kind Kind kind Kind
Width Spec
mask uint16
} }
// NewVirtual builds a Virtual register. // NewVirtual builds a Virtual register.
func NewVirtual(id VID, k Kind, w Width) Virtual { func NewVirtual(idx Index, k Kind, s Spec) Virtual {
return virtual{ return virtual{
id: id, idx: idx,
kind: k, kind: k,
Width: w, Spec: s,
} }
} }
func (v virtual) VirtualID() VID { return v.id } func (v virtual) ID() ID { return newid(1, v.kind, v.idx) }
func (v virtual) Kind() Kind { return v.kind } func (v virtual) VirtualIndex() Index { return v.idx }
func (v virtual) Kind() Kind { return v.kind }
func (v virtual) Asm() string { func (v virtual) Asm() string {
// TODO(mbm): decide on virtual register syntax // TODO(mbm): decide on virtual register syntax
return fmt.Sprintf("<virtual:%v:%v:%v>", v.id, v.Kind(), v.Size()) return fmt.Sprintf("<virtual:%v:%v:%v>", v.idx, v.Kind(), v.Size())
}
func (v virtual) SatisfiedBy(p Physical) bool {
return v.Kind() == p.Kind() && v.Size() == p.Size() && (v.mask == 0 || v.mask == p.Mask())
} }
func (v virtual) as(s Spec) Register { func (v virtual) as(s Spec) Register {
return virtual{ return virtual{
id: v.id, idx: v.idx,
kind: v.kind, kind: v.kind,
Width: Width(s.Size()), Spec: s,
mask: s.Mask(),
} }
} }
func (v virtual) register() {} func (v virtual) spec() Spec { return v.Spec }
func (v virtual) register() {}
// Info is a bitmask of register properties. // Info is a bitmask of register properties.
type Info uint8 type Info uint8
@@ -150,13 +147,9 @@ const (
Restricted Info = 1 << iota Restricted Info = 1 << iota
) )
// PID is a physical register ID.
type PID uint16
// Physical is a concrete register. // Physical is a concrete register.
type Physical interface { type Physical interface {
PhysicalID() PID PhysicalIndex() Index
Mask() uint16
Info() Info Info() Info
Register Register
} }
@@ -172,16 +165,16 @@ func ToPhysical(r Register) Physical {
// register implements Physical. // register implements Physical.
type register struct { type register struct {
family *Family family *Family
id PID idx Index
name string name string
info Info info Info
Spec Spec
} }
func newregister(f *Family, s Spec, id PID, name string, flags ...Info) register { func newregister(f *Family, s Spec, idx Index, name string, flags ...Info) register {
r := register{ r := register{
family: f, family: f,
id: id, idx: idx,
name: name, name: name,
info: None, info: None,
Spec: s, Spec: s,
@@ -192,16 +185,18 @@ func newregister(f *Family, s Spec, id PID, name string, flags ...Info) register
return r return r
} }
func (r register) PhysicalID() PID { return r.id } func (r register) ID() ID { return newid(0, r.Kind(), r.idx) }
func (r register) Kind() Kind { return r.family.Kind } func (r register) PhysicalIndex() Index { return r.idx }
func (r register) Asm() string { return r.name } func (r register) Kind() Kind { return r.family.Kind }
func (r register) Info() Info { return r.info } func (r register) Asm() string { return r.name }
func (r register) Info() Info { return r.info }
func (r register) as(s Spec) Register { func (r register) as(s Spec) Register {
return r.family.Lookup(r.PhysicalID(), s) return r.family.Lookup(r.PhysicalIndex(), s)
} }
func (r register) register() {} func (r register) spec() Spec { return r.Spec }
func (r register) register() {}
// Spec defines the size of a register as well as the bit ranges it occupies in // Spec defines the size of a register as well as the bit ranges it occupies in
// an underlying physical register. // an underlying physical register.
@@ -235,13 +230,25 @@ func (s Spec) Size() uint {
return (x >> 1) + (x & 1) return (x >> 1) + (x & 1)
} }
// AreConflicting returns whether registers conflict with each other. // LookupPhysical returns the physical register with the given parameters, or nil if not found.
func AreConflicting(x, y Physical) bool { func LookupPhysical(k Kind, idx Index, s Spec) Physical {
return x.Kind() == y.Kind() && x.PhysicalID() == y.PhysicalID() && (x.Mask()&y.Mask()) != 0 f := FamilyOfKind(k)
if f == nil {
return nil
}
return f.Lookup(idx, s)
}
// LookupID returns the physical register with the given id and spec, or nil if not found.
func LookupID(id ID, s Spec) Physical {
if id.IsVirtual() {
return nil
}
return LookupPhysical(id.Kind(), id.Index(), s)
} }
// Allocation records a register allocation. // Allocation records a register allocation.
type Allocation map[Register]Physical type Allocation map[ID]ID
// NewEmptyAllocation builds an empty register allocation. // NewEmptyAllocation builds an empty register allocation.
func NewEmptyAllocation() Allocation { func NewEmptyAllocation() Allocation {
@@ -251,18 +258,46 @@ func NewEmptyAllocation() Allocation {
// Merge allocations from b into a. Errors if there is disagreement on a common // Merge allocations from b into a. Errors if there is disagreement on a common
// register. // register.
func (a Allocation) Merge(b Allocation) error { func (a Allocation) Merge(b Allocation) error {
for r, p := range b { for id, p := range b {
if alt, found := a[r]; found && alt != p { if alt, found := a[id]; found && alt != p {
return errors.New("disagreement on overlapping register") return errors.New("disagreement on overlapping register")
} }
a[r] = p a[id] = p
} }
return nil return nil
} }
// LookupDefault returns the register assigned to r, or r itself if there is none. // LookupDefault returns the register ID assigned by this allocation, returning
func (a Allocation) LookupDefault(r Register) Register { // id if none is found.
if p, found := a[r]; found { func (a Allocation) LookupDefault(id ID) ID {
if _, found := a[id]; found {
return a[id]
}
return id
}
// LookupRegister the allocation for register r, or return nil if there is none.
func (a Allocation) LookupRegister(r Register) Physical {
// Return immediately if it is already a physical register.
if p := ToPhysical(r); p != nil {
return p
}
// Lookup an allocation for this virtual ID.
id, found := a[r.ID()]
if !found {
return nil
}
return LookupID(id, r.spec())
}
// LookupRegisterDefault returns the register assigned to r, or r itself if there is none.
func (a Allocation) LookupRegisterDefault(r Register) Register {
if r == nil {
return nil
}
if p := a.LookupRegister(r); p != nil {
return p return p
} }
return r return r

View File

@@ -28,7 +28,7 @@ func init() {
} }
} }
// FamilyOfKind returns the Family of registers of the given kind. // FamilyOfKind returns the Family of registers of the given kind, or nil if not found.
func FamilyOfKind(k Kind) *Family { func FamilyOfKind(k Kind) *Family {
return familiesByKind[k] return familiesByKind[k]
} }
@@ -51,17 +51,6 @@ type GP interface {
As64() Register As64() Register
} }
type gpcasts struct {
Register
}
func (c gpcasts) As8() Register { return c.as(S8) }
func (c gpcasts) As8L() Register { return c.as(S8L) }
func (c gpcasts) As8H() Register { return c.as(S8H) }
func (c gpcasts) As16() Register { return c.as(S16) }
func (c gpcasts) As32() Register { return c.as(S32) }
func (c gpcasts) As64() Register { return c.as(S64) }
// GPPhysical is a general-purpose physical register. // GPPhysical is a general-purpose physical register.
type GPPhysical interface { type GPPhysical interface {
Physical Physical
@@ -70,10 +59,16 @@ type GPPhysical interface {
type gpp struct { type gpp struct {
Physical Physical
GP
} }
func newgpp(r Physical) GPPhysical { return gpp{Physical: r, GP: gpcasts{r}} } func newgpp(r Physical) GPPhysical { return gpp{Physical: r} }
func (p gpp) As8() Register { return newgpp(p.as(S8).(Physical)) }
func (p gpp) As8L() Register { return newgpp(p.as(S8L).(Physical)) }
func (p gpp) As8H() Register { return newgpp(p.as(S8H).(Physical)) }
func (p gpp) As16() Register { return newgpp(p.as(S16).(Physical)) }
func (p gpp) As32() Register { return newgpp(p.as(S32).(Physical)) }
func (p gpp) As64() Register { return newgpp(p.as(S64).(Physical)) }
// GPVirtual is a general-purpose virtual register. // GPVirtual is a general-purpose virtual register.
type GPVirtual interface { type GPVirtual interface {
@@ -83,12 +78,18 @@ type GPVirtual interface {
type gpv struct { type gpv struct {
Virtual Virtual
GP
} }
func newgpv(v Virtual) GPVirtual { return gpv{Virtual: v, GP: gpcasts{v}} } func newgpv(v Virtual) GPVirtual { return gpv{Virtual: v} }
func gp(s Spec, id PID, name string, flags ...Info) GPPhysical { func (v gpv) As8() Register { return newgpv(v.as(S8).(Virtual)) }
func (v gpv) As8L() Register { return newgpv(v.as(S8L).(Virtual)) }
func (v gpv) As8H() Register { return newgpv(v.as(S8H).(Virtual)) }
func (v gpv) As16() Register { return newgpv(v.as(S16).(Virtual)) }
func (v gpv) As32() Register { return newgpv(v.as(S32).(Virtual)) }
func (v gpv) As64() Register { return newgpv(v.as(S64).(Virtual)) }
func gp(s Spec, id Index, name string, flags ...Info) GPPhysical {
r := newgpp(newregister(GeneralPurpose, s, id, name, flags...)) r := newgpp(newregister(GeneralPurpose, s, id, name, flags...))
GeneralPurpose.add(r) GeneralPurpose.add(r)
return r return r
@@ -184,14 +185,6 @@ type Vec interface {
AsZ() Register AsZ() Register
} }
type veccasts struct {
Register
}
func (c veccasts) AsX() Register { return c.as(S128) }
func (c veccasts) AsY() Register { return c.as(S256) }
func (c veccasts) AsZ() Register { return c.as(S512) }
// VecPhysical is a physical vector register. // VecPhysical is a physical vector register.
type VecPhysical interface { type VecPhysical interface {
Physical Physical
@@ -203,7 +196,11 @@ type vecp struct {
Vec Vec
} }
func newvecp(r Physical) VecPhysical { return vecp{Physical: r, Vec: veccasts{r}} } func newvecp(r Physical) VecPhysical { return vecp{Physical: r} }
func (p vecp) AsX() Register { return newvecp(p.as(S128).(Physical)) }
func (p vecp) AsY() Register { return newvecp(p.as(S256).(Physical)) }
func (p vecp) AsZ() Register { return newvecp(p.as(S512).(Physical)) }
// VecVirtual is a virtual vector register. // VecVirtual is a virtual vector register.
type VecVirtual interface { type VecVirtual interface {
@@ -216,9 +213,13 @@ type vecv struct {
Vec Vec
} }
func newvecv(v Virtual) VecVirtual { return vecv{Virtual: v, Vec: veccasts{v}} } func newvecv(v Virtual) VecVirtual { return vecv{Virtual: v} }
func vec(s Spec, id PID, name string, flags ...Info) VecPhysical { func (v vecv) AsX() Register { return newvecv(v.as(S128).(Virtual)) }
func (v vecv) AsY() Register { return newvecv(v.as(S256).(Virtual)) }
func (v vecv) AsZ() Register { return newvecv(v.as(S512).(Virtual)) }
func vec(s Spec, id Index, name string, flags ...Info) VecPhysical {
r := newvecp(newregister(Vector, s, id, name, flags...)) r := newvecp(newregister(Vector, s, id, name, flags...))
Vector.add(r) Vector.add(r)
return r return r

View File

@@ -21,7 +21,67 @@ func TestAsMethods(t *testing.T) {
{Z9.AsZ(), Z9}, {Z9.AsZ(), Z9},
} }
for _, c := range cases { for _, c := range cases {
if c[0] != c[1] { if !Equal(c[0], c[1]) {
t.FailNow()
}
}
}
func TestAsPreservesGPPhysical(t *testing.T) {
cases := []Register{
RAX.As8(),
R13.As8L(),
AL.As8H(),
EAX.As16(),
CH.As32(),
EBX.As64(),
}
for _, r := range cases {
if _, ok := r.(GPPhysical); !ok {
t.FailNow()
}
}
}
func TestAsPreservesGPVirtual(t *testing.T) {
collection := NewCollection()
cases := []Register{
collection.GP16().As8(),
collection.GP32().As8L(),
collection.GP64().As8H(),
collection.GP8().As16(),
collection.GP8L().As32(),
collection.GP8H().As64(),
}
for _, r := range cases {
if _, ok := r.(GPVirtual); !ok {
t.FailNow()
}
}
}
func TestAsPreservesVecPhysical(t *testing.T) {
cases := []Register{
Y13.AsX(),
X3.AsY(),
Y10.AsZ(),
}
for _, r := range cases {
if _, ok := r.(VecPhysical); !ok {
t.FailNow()
}
}
}
func TestAsPreservesVecVirtual(t *testing.T) {
collection := NewCollection()
cases := []Register{
collection.ZMM().AsX(),
collection.XMM().AsY(),
collection.YMM().AsZ(),
}
for _, r := range cases {
if _, ok := r.(VecVirtual); !ok {
t.FailNow() t.FailNow()
} }
} }

View File

@@ -21,8 +21,17 @@ func main() {
// Allocate registers and initialize. // Allocate registers and initialize.
x := make([]Register, n) x := make([]Register, n)
for i := 0; i < n; i++ { i := 0
x[i] = GP8()
// Low byte registers.
for ; i < 15; i++ {
x[i] = GP8L()
MOVB(U8(i+1), x[i])
}
// High byte registers.
for ; i < n; i++ {
x[i] = GP8H()
MOVB(U8(i+1), x[i]) MOVB(U8(i+1), x[i])
} }

View File

@@ -8,28 +8,24 @@ TEXT ·GP8(SB), NOSPLIT, $0-1
MOVB $0x02, CL MOVB $0x02, CL
MOVB $0x03, DL MOVB $0x03, DL
MOVB $0x04, BL MOVB $0x04, BL
MOVB $0x05, AH MOVB $0x05, BP
MOVB $0x06, CH MOVB $0x06, SI
MOVB $0x07, DH MOVB $0x07, DI
MOVB $0x08, BH MOVB $0x08, R8
MOVB $0x09, BP MOVB $0x09, R9
MOVB $0x0a, SI MOVB $0x0a, R10
MOVB $0x0b, DI MOVB $0x0b, R11
MOVB $0x0c, R8 MOVB $0x0c, R12
MOVB $0x0d, R9 MOVB $0x0d, R13
MOVB $0x0e, R10 MOVB $0x0e, R14
MOVB $0x0f, R11 MOVB $0x0f, R15
MOVB $0x10, R12 MOVB $0x10, AH
MOVB $0x11, R13 MOVB $0x11, CH
MOVB $0x12, R14 MOVB $0x12, DH
MOVB $0x13, R15 MOVB $0x13, BH
ADDB CL, AL ADDB CL, AL
ADDB DL, AL ADDB DL, AL
ADDB BL, AL ADDB BL, AL
ADDB AH, AL
ADDB CH, AL
ADDB DH, AL
ADDB BH, AL
ADDB BP, AL ADDB BP, AL
ADDB SI, AL ADDB SI, AL
ADDB DI, AL ADDB DI, AL
@@ -41,5 +37,9 @@ TEXT ·GP8(SB), NOSPLIT, $0-1
ADDB R13, AL ADDB R13, AL
ADDB R14, AL ADDB R14, AL
ADDB R15, AL ADDB R15, AL
ADDB AH, AL
ADDB CH, AL
ADDB DH, AL
ADDB BH, AL
MOVB AL, ret+0(FP) MOVB AL, ret+0(FP)
RET RET

66
tests/alloc/masks/asm.go Normal file
View File

@@ -0,0 +1,66 @@
// +build ignore
package main
import (
"strconv"
. "github.com/mmcloughlin/avo/build"
. "github.com/mmcloughlin/avo/operand"
. "github.com/mmcloughlin/avo/reg"
)
// The goal of this test is to create a synthetic scenario in which register
// allocation would fail if register liveness and allocation passes didn't take
// masks into account.
//
// The idea is to create a set of 15 64-bit virtual registers (15 being total
// number of allocatable 64-bit general purpose registers). For each one: write
// to the whole register and then later write to only the low 16 bits, and
// finally consume the whole 64-bit register. This means there is an interval in
// which only the high 48-bits are live. During this interval we should be able
// to allocate and use a set of 15 16-bit virtual registers.
func main() {
const n = 15
TEXT("Masks", NOSPLIT, "func() (uint16, uint64)")
Doc("Masks computes the sum 1+2+...+" + strconv.Itoa(n) + " in two ways.")
// Step 1: Allocate n 64-bit registers A that we will arrange to live in their top 48 bits.
A := make([]GPVirtual, n)
for i := 0; i < n; i++ {
A[i] = GP64()
c := ((i + 1) << 16) | 42 // 42 in low bits will be cleared later
MOVQ(U32(c), A[i])
}
// Step 3: Allocate n 16-bit registers B.
B := make([]Register, n)
for i := 0; i < n; i++ {
B[i] = GP16()
MOVW(U16(i+1), B[i])
}
// Step 3: Sum up all the B registers and return.
for i := 1; i < n; i++ {
ADDW(B[i], B[0])
}
Store(B[0], ReturnIndex(0))
// Step 4: Clear the low 16-bits of the A registers.
for i := 0; i < n; i++ {
MOVW(U16(0), A[i].As16())
}
// Step 5: Sum up all the A registers and return.
for i := 1; i < n; i++ {
ADDQ(A[i], A[0])
}
SHRQ(U8(16), A[0])
Store(A[0], ReturnIndex(1))
RET()
Generate()
}

2
tests/alloc/masks/doc.go Normal file
View File

@@ -0,0 +1,2 @@
// Package masks tests that register liveness and allocation passes handle masks correctly.
package masks

83
tests/alloc/masks/masks.s Normal file
View File

@@ -0,0 +1,83 @@
// Code generated by command: go run asm.go -out masks.s -stubs stub.go. DO NOT EDIT.
#include "textflag.h"
// func Masks() (uint16, uint64)
TEXT ·Masks(SB), NOSPLIT, $0-16
MOVQ $0x0001002a, AX
MOVQ $0x0002002a, CX
MOVQ $0x0003002a, DX
MOVQ $0x0004002a, BX
MOVQ $0x0005002a, BP
MOVQ $0x0006002a, SI
MOVQ $0x0007002a, DI
MOVQ $0x0008002a, R8
MOVQ $0x0009002a, R9
MOVQ $0x000a002a, R10
MOVQ $0x000b002a, R11
MOVQ $0x000c002a, R12
MOVQ $0x000d002a, R13
MOVQ $0x000e002a, R14
MOVQ $0x000f002a, R15
MOVW $0x0001, AX
MOVW $0x0002, CX
MOVW $0x0003, DX
MOVW $0x0004, BX
MOVW $0x0005, BP
MOVW $0x0006, SI
MOVW $0x0007, DI
MOVW $0x0008, R8
MOVW $0x0009, R9
MOVW $0x000a, R10
MOVW $0x000b, R11
MOVW $0x000c, R12
MOVW $0x000d, R13
MOVW $0x000e, R14
MOVW $0x000f, R15
ADDW CX, AX
ADDW DX, AX
ADDW BX, AX
ADDW BP, AX
ADDW SI, AX
ADDW DI, AX
ADDW R8, AX
ADDW R9, AX
ADDW R10, AX
ADDW R11, AX
ADDW R12, AX
ADDW R13, AX
ADDW R14, AX
ADDW R15, AX
MOVW AX, ret+0(FP)
MOVW $0x0000, AX
MOVW $0x0000, CX
MOVW $0x0000, DX
MOVW $0x0000, BX
MOVW $0x0000, BP
MOVW $0x0000, SI
MOVW $0x0000, DI
MOVW $0x0000, R8
MOVW $0x0000, R9
MOVW $0x0000, R10
MOVW $0x0000, R11
MOVW $0x0000, R12
MOVW $0x0000, R13
MOVW $0x0000, R14
MOVW $0x0000, R15
ADDQ CX, AX
ADDQ DX, AX
ADDQ BX, AX
ADDQ BP, AX
ADDQ SI, AX
ADDQ DI, AX
ADDQ R8, AX
ADDQ R9, AX
ADDQ R10, AX
ADDQ R11, AX
ADDQ R12, AX
ADDQ R13, AX
ADDQ R14, AX
ADDQ R15, AX
SHRQ $0x10, AX
MOVQ AX, ret1+8(FP)
RET

View File

@@ -0,0 +1,15 @@
package masks
import (
"testing"
)
//go:generate go run asm.go -out masks.s -stubs stub.go
func TestMasks(t *testing.T) {
const n = 15
const expect = n * (n + 1) / 2
if got16, got64 := Masks(); got16 != expect || got64 != expect {
t.Fatalf("Masks() = %v, %v; expect %v, %v", got16, got64, expect, expect)
}
}

View File

@@ -0,0 +1,6 @@
// Code generated by command: go run asm.go -out masks.s -stubs stub.go. DO NOT EDIT.
package masks
// Masks computes the sum 1+2+...+15 in two ways.
func Masks() (uint16, uint64)

View File

@@ -0,0 +1,66 @@
// +build ignore
package main
import (
"strconv"
. "github.com/mmcloughlin/avo/build"
. "github.com/mmcloughlin/avo/operand"
. "github.com/mmcloughlin/avo/reg"
)
// The goal is to test for correct handling of 32-bit operands in 64-bit mode,
// specifically that writes are zero-extended to 64 bits. This test is
// constructed such that the register allocator would fail if this feature is
// not accounted for. It consists of multiple copies of a 32-bit write followed
// by a 64-bit read of the same register. Without special treatment liveness
// analysis would consider the upper 32 bits to still be live prior to the
// write. Therefore if we stack up enough copies of this, we could cause the
// register allocator to fail.
func main() {
const (
r = 14 // number of registers
m = 3 // number of iterations
n = r * m
)
TEXT("Upper32", NOSPLIT, "func() uint64")
Doc("Upper32 computes the sum 1+2+...+" + strconv.Itoa(n) + ".")
Comment("Initialize sum.")
s := GP64()
XORQ(s, s)
// Allocate n 64-bit registers and populate them.
Comment("Initialize registers.")
x := make([]GPVirtual, n)
for i := 0; i < n; i++ {
x[i] = GP64()
MOVQ(U64(0x9e77d78aacb8cbcc), x[i])
}
k := 0
for i := 0; i < m; i++ {
Commentf("Iteration %d.", i+1)
// Write to the 32-bit aliases of r registers.
for j := 0; j < r; j++ {
MOVL(U32(k+j+1), x[k+j].As32())
}
// Sum them up.
for j := 0; j < r; j++ {
ADDQ(x[k+j], s)
}
k += r
}
Comment("Store result and return.")
Store(s, ReturnIndex(0))
RET()
Generate()
}

View File

@@ -0,0 +1,2 @@
// Package upper32 tests liveness analysis of 32-bit operations on 64-bit registers.
package upper32

View File

@@ -0,0 +1,6 @@
// Code generated by command: go run asm.go -out upper32.s -stubs stub.go. DO NOT EDIT.
package upper32
// Upper32 computes the sum 1+2+...+42.
func Upper32() uint64

View File

@@ -0,0 +1,146 @@
// Code generated by command: go run asm.go -out upper32.s -stubs stub.go. DO NOT EDIT.
#include "textflag.h"
// func Upper32() uint64
TEXT ·Upper32(SB), NOSPLIT, $0-8
// Initialize sum.
XORQ AX, AX
// Initialize registers.
MOVQ $0x9e77d78aacb8cbcc, CX
MOVQ $0x9e77d78aacb8cbcc, DX
MOVQ $0x9e77d78aacb8cbcc, BX
MOVQ $0x9e77d78aacb8cbcc, BP
MOVQ $0x9e77d78aacb8cbcc, SI
MOVQ $0x9e77d78aacb8cbcc, DI
MOVQ $0x9e77d78aacb8cbcc, R8
MOVQ $0x9e77d78aacb8cbcc, R9
MOVQ $0x9e77d78aacb8cbcc, R10
MOVQ $0x9e77d78aacb8cbcc, R11
MOVQ $0x9e77d78aacb8cbcc, R12
MOVQ $0x9e77d78aacb8cbcc, R13
MOVQ $0x9e77d78aacb8cbcc, R14
MOVQ $0x9e77d78aacb8cbcc, R15
MOVQ $0x9e77d78aacb8cbcc, CX
MOVQ $0x9e77d78aacb8cbcc, DX
MOVQ $0x9e77d78aacb8cbcc, BX
MOVQ $0x9e77d78aacb8cbcc, BP
MOVQ $0x9e77d78aacb8cbcc, SI
MOVQ $0x9e77d78aacb8cbcc, DI
MOVQ $0x9e77d78aacb8cbcc, R8
MOVQ $0x9e77d78aacb8cbcc, R9
MOVQ $0x9e77d78aacb8cbcc, R10
MOVQ $0x9e77d78aacb8cbcc, R11
MOVQ $0x9e77d78aacb8cbcc, R12
MOVQ $0x9e77d78aacb8cbcc, R13
MOVQ $0x9e77d78aacb8cbcc, R14
MOVQ $0x9e77d78aacb8cbcc, R15
MOVQ $0x9e77d78aacb8cbcc, CX
MOVQ $0x9e77d78aacb8cbcc, DX
MOVQ $0x9e77d78aacb8cbcc, BX
MOVQ $0x9e77d78aacb8cbcc, BP
MOVQ $0x9e77d78aacb8cbcc, SI
MOVQ $0x9e77d78aacb8cbcc, DI
MOVQ $0x9e77d78aacb8cbcc, R8
MOVQ $0x9e77d78aacb8cbcc, R9
MOVQ $0x9e77d78aacb8cbcc, R10
MOVQ $0x9e77d78aacb8cbcc, R11
MOVQ $0x9e77d78aacb8cbcc, R12
MOVQ $0x9e77d78aacb8cbcc, R13
MOVQ $0x9e77d78aacb8cbcc, R14
MOVQ $0x9e77d78aacb8cbcc, R15
// Iteration 1.
MOVL $0x00000001, CX
MOVL $0x00000002, DX
MOVL $0x00000003, BX
MOVL $0x00000004, BP
MOVL $0x00000005, SI
MOVL $0x00000006, DI
MOVL $0x00000007, R8
MOVL $0x00000008, R9
MOVL $0x00000009, R10
MOVL $0x0000000a, R11
MOVL $0x0000000b, R12
MOVL $0x0000000c, R13
MOVL $0x0000000d, R14
MOVL $0x0000000e, R15
ADDQ CX, AX
ADDQ DX, AX
ADDQ BX, AX
ADDQ BP, AX
ADDQ SI, AX
ADDQ DI, AX
ADDQ R8, AX
ADDQ R9, AX
ADDQ R10, AX
ADDQ R11, AX
ADDQ R12, AX
ADDQ R13, AX
ADDQ R14, AX
ADDQ R15, AX
// Iteration 2.
MOVL $0x0000000f, CX
MOVL $0x00000010, DX
MOVL $0x00000011, BX
MOVL $0x00000012, BP
MOVL $0x00000013, SI
MOVL $0x00000014, DI
MOVL $0x00000015, R8
MOVL $0x00000016, R9
MOVL $0x00000017, R10
MOVL $0x00000018, R11
MOVL $0x00000019, R12
MOVL $0x0000001a, R13
MOVL $0x0000001b, R14
MOVL $0x0000001c, R15
ADDQ CX, AX
ADDQ DX, AX
ADDQ BX, AX
ADDQ BP, AX
ADDQ SI, AX
ADDQ DI, AX
ADDQ R8, AX
ADDQ R9, AX
ADDQ R10, AX
ADDQ R11, AX
ADDQ R12, AX
ADDQ R13, AX
ADDQ R14, AX
ADDQ R15, AX
// Iteration 3.
MOVL $0x0000001d, CX
MOVL $0x0000001e, DX
MOVL $0x0000001f, BX
MOVL $0x00000020, BP
MOVL $0x00000021, SI
MOVL $0x00000022, DI
MOVL $0x00000023, R8
MOVL $0x00000024, R9
MOVL $0x00000025, R10
MOVL $0x00000026, R11
MOVL $0x00000027, R12
MOVL $0x00000028, R13
MOVL $0x00000029, R14
MOVL $0x0000002a, R15
ADDQ CX, AX
ADDQ DX, AX
ADDQ BX, AX
ADDQ BP, AX
ADDQ SI, AX
ADDQ DI, AX
ADDQ R8, AX
ADDQ R9, AX
ADDQ R10, AX
ADDQ R11, AX
ADDQ R12, AX
ADDQ R13, AX
ADDQ R14, AX
ADDQ R15, AX
// Store result and return.
MOVQ AX, ret+0(FP)
RET

View File

@@ -0,0 +1,15 @@
package upper32
import (
"testing"
)
//go:generate go run asm.go -out upper32.s -stubs stub.go
func TestUpper32(t *testing.T) {
const n = 14 * 3
const expect = n * (n + 1) / 2
if got := Upper32(); got != expect {
t.Fatalf("Upper32() = %v; expect %v", got, expect)
}
}

View File

@@ -0,0 +1,22 @@
// +build ignore
package main
import (
. "github.com/mmcloughlin/avo/build"
. "github.com/mmcloughlin/avo/operand"
)
func main() {
TEXT("Issue100", NOSPLIT, "func() uint64")
x := GP64()
XORQ(x, x)
for i := 1; i <= 100; i++ {
t := GP64()
MOVQ(U32(i), t)
ADDQ(t.As64(), x)
}
Store(x, ReturnIndex(0))
RET()
Generate()
}

View File

@@ -0,0 +1,2 @@
// Package issue100 contains a reproducer for a bug in aliased register allocation.
package issue100

View File

@@ -0,0 +1,209 @@
// Code generated by command: go run asm.go -out issue100.s -stubs stub.go. DO NOT EDIT.
#include "textflag.h"
// func Issue100() uint64
TEXT ·Issue100(SB), NOSPLIT, $0-8
XORQ AX, AX
MOVQ $0x00000001, CX
ADDQ CX, AX
MOVQ $0x00000002, CX
ADDQ CX, AX
MOVQ $0x00000003, CX
ADDQ CX, AX
MOVQ $0x00000004, CX
ADDQ CX, AX
MOVQ $0x00000005, CX
ADDQ CX, AX
MOVQ $0x00000006, CX
ADDQ CX, AX
MOVQ $0x00000007, CX
ADDQ CX, AX
MOVQ $0x00000008, CX
ADDQ CX, AX
MOVQ $0x00000009, CX
ADDQ CX, AX
MOVQ $0x0000000a, CX
ADDQ CX, AX
MOVQ $0x0000000b, CX
ADDQ CX, AX
MOVQ $0x0000000c, CX
ADDQ CX, AX
MOVQ $0x0000000d, CX
ADDQ CX, AX
MOVQ $0x0000000e, CX
ADDQ CX, AX
MOVQ $0x0000000f, CX
ADDQ CX, AX
MOVQ $0x00000010, CX
ADDQ CX, AX
MOVQ $0x00000011, CX
ADDQ CX, AX
MOVQ $0x00000012, CX
ADDQ CX, AX
MOVQ $0x00000013, CX
ADDQ CX, AX
MOVQ $0x00000014, CX
ADDQ CX, AX
MOVQ $0x00000015, CX
ADDQ CX, AX
MOVQ $0x00000016, CX
ADDQ CX, AX
MOVQ $0x00000017, CX
ADDQ CX, AX
MOVQ $0x00000018, CX
ADDQ CX, AX
MOVQ $0x00000019, CX
ADDQ CX, AX
MOVQ $0x0000001a, CX
ADDQ CX, AX
MOVQ $0x0000001b, CX
ADDQ CX, AX
MOVQ $0x0000001c, CX
ADDQ CX, AX
MOVQ $0x0000001d, CX
ADDQ CX, AX
MOVQ $0x0000001e, CX
ADDQ CX, AX
MOVQ $0x0000001f, CX
ADDQ CX, AX
MOVQ $0x00000020, CX
ADDQ CX, AX
MOVQ $0x00000021, CX
ADDQ CX, AX
MOVQ $0x00000022, CX
ADDQ CX, AX
MOVQ $0x00000023, CX
ADDQ CX, AX
MOVQ $0x00000024, CX
ADDQ CX, AX
MOVQ $0x00000025, CX
ADDQ CX, AX
MOVQ $0x00000026, CX
ADDQ CX, AX
MOVQ $0x00000027, CX
ADDQ CX, AX
MOVQ $0x00000028, CX
ADDQ CX, AX
MOVQ $0x00000029, CX
ADDQ CX, AX
MOVQ $0x0000002a, CX
ADDQ CX, AX
MOVQ $0x0000002b, CX
ADDQ CX, AX
MOVQ $0x0000002c, CX
ADDQ CX, AX
MOVQ $0x0000002d, CX
ADDQ CX, AX
MOVQ $0x0000002e, CX
ADDQ CX, AX
MOVQ $0x0000002f, CX
ADDQ CX, AX
MOVQ $0x00000030, CX
ADDQ CX, AX
MOVQ $0x00000031, CX
ADDQ CX, AX
MOVQ $0x00000032, CX
ADDQ CX, AX
MOVQ $0x00000033, CX
ADDQ CX, AX
MOVQ $0x00000034, CX
ADDQ CX, AX
MOVQ $0x00000035, CX
ADDQ CX, AX
MOVQ $0x00000036, CX
ADDQ CX, AX
MOVQ $0x00000037, CX
ADDQ CX, AX
MOVQ $0x00000038, CX
ADDQ CX, AX
MOVQ $0x00000039, CX
ADDQ CX, AX
MOVQ $0x0000003a, CX
ADDQ CX, AX
MOVQ $0x0000003b, CX
ADDQ CX, AX
MOVQ $0x0000003c, CX
ADDQ CX, AX
MOVQ $0x0000003d, CX
ADDQ CX, AX
MOVQ $0x0000003e, CX
ADDQ CX, AX
MOVQ $0x0000003f, CX
ADDQ CX, AX
MOVQ $0x00000040, CX
ADDQ CX, AX
MOVQ $0x00000041, CX
ADDQ CX, AX
MOVQ $0x00000042, CX
ADDQ CX, AX
MOVQ $0x00000043, CX
ADDQ CX, AX
MOVQ $0x00000044, CX
ADDQ CX, AX
MOVQ $0x00000045, CX
ADDQ CX, AX
MOVQ $0x00000046, CX
ADDQ CX, AX
MOVQ $0x00000047, CX
ADDQ CX, AX
MOVQ $0x00000048, CX
ADDQ CX, AX
MOVQ $0x00000049, CX
ADDQ CX, AX
MOVQ $0x0000004a, CX
ADDQ CX, AX
MOVQ $0x0000004b, CX
ADDQ CX, AX
MOVQ $0x0000004c, CX
ADDQ CX, AX
MOVQ $0x0000004d, CX
ADDQ CX, AX
MOVQ $0x0000004e, CX
ADDQ CX, AX
MOVQ $0x0000004f, CX
ADDQ CX, AX
MOVQ $0x00000050, CX
ADDQ CX, AX
MOVQ $0x00000051, CX
ADDQ CX, AX
MOVQ $0x00000052, CX
ADDQ CX, AX
MOVQ $0x00000053, CX
ADDQ CX, AX
MOVQ $0x00000054, CX
ADDQ CX, AX
MOVQ $0x00000055, CX
ADDQ CX, AX
MOVQ $0x00000056, CX
ADDQ CX, AX
MOVQ $0x00000057, CX
ADDQ CX, AX
MOVQ $0x00000058, CX
ADDQ CX, AX
MOVQ $0x00000059, CX
ADDQ CX, AX
MOVQ $0x0000005a, CX
ADDQ CX, AX
MOVQ $0x0000005b, CX
ADDQ CX, AX
MOVQ $0x0000005c, CX
ADDQ CX, AX
MOVQ $0x0000005d, CX
ADDQ CX, AX
MOVQ $0x0000005e, CX
ADDQ CX, AX
MOVQ $0x0000005f, CX
ADDQ CX, AX
MOVQ $0x00000060, CX
ADDQ CX, AX
MOVQ $0x00000061, CX
ADDQ CX, AX
MOVQ $0x00000062, CX
ADDQ CX, AX
MOVQ $0x00000063, CX
ADDQ CX, AX
MOVQ $0x00000064, CX
ADDQ CX, AX
MOVQ AX, ret+0(FP)
RET

View File

@@ -0,0 +1,15 @@
package issue100
import (
"testing"
)
//go:generate go run asm.go -out issue100.s -stubs stub.go
func TestIssue100(t *testing.T) {
n := uint64(100)
expect := n * (n + 1) / 2
if got := Issue100(); got != expect {
t.Fatalf("Issue100() = %v; expect %v", got, expect)
}
}

View File

@@ -0,0 +1,5 @@
// Code generated by command: go run asm.go -out issue100.s -stubs stub.go. DO NOT EDIT.
package issue100
func Issue100() uint64

View File

@@ -0,0 +1,23 @@
// +build generate
//go:generate go run $GOFILE
// Regression test for a bug where casting a physical register would give the
// error "non physical register found".
//
// See: https://github.com/mmcloughlin/avo/issues/65#issuecomment-576850145
package main
import (
. "github.com/mmcloughlin/avo/build"
. "github.com/mmcloughlin/avo/operand"
. "github.com/mmcloughlin/avo/reg"
)
func main() {
TEXT("Issue65", NOSPLIT, "func()")
VINSERTI128(Imm(1), Y0.AsX(), Y1, Y2)
RET()
Generate()
}