diff --git a/cl/compile.go b/cl/compile.go
index 4517303e..419618cf 100644
--- a/cl/compile.go
+++ b/cl/compile.go
@@ -543,12 +543,9 @@ func (p *context) compileInstrOrValue(b llssa.Builder, iv instrOrValue, asValue
 		x := p.compileValue(b, v.X)
 		ret = b.MakeInterface(t, x)
 	case *ssa.MakeSlice:
-		var nCap llssa.Expr
 		t := p.prog.Type(v.Type(), llssa.InGo)
 		nLen := p.compileValue(b, v.Len)
-		if v.Cap != nil {
-			nCap = p.compileValue(b, v.Cap)
-		}
+		nCap := p.compileValue(b, v.Cap)
 		ret = b.MakeSlice(t, nLen, nCap)
 	case *ssa.MakeMap:
 		var nReserve llssa.Expr
diff --git a/internal/runtime/goarch/goarch.go b/internal/runtime/goarch/goarch.go
new file mode 100644
index 00000000..24c0de95
--- /dev/null
+++ b/internal/runtime/goarch/goarch.go
@@ -0,0 +1,3 @@
+package goarch
+
+const PtrSize = 4 << (^uintptr(0) >> 63)
diff --git a/internal/runtime/math/math.go b/internal/runtime/math/math.go
new file mode 100644
index 00000000..8d38eab8
--- /dev/null
+++ b/internal/runtime/math/math.go
@@ -0,0 +1,15 @@
+package math
+
+import "github.com/goplus/llgo/internal/runtime/goarch"
+
+const MaxUintptr = ^uintptr(0)
+
+// MulUintptr returns a * b and whether the multiplication overflowed.
+// On supported platforms this is an intrinsic lowered by the compiler.
+func MulUintptr(a, b uintptr) (uintptr, bool) {
+	if a|b < 1<<(4*goarch.PtrSize) || a == 0 {
+		return a * b, false
+	}
+	overflow := b > MaxUintptr/a
+	return a * b, overflow
+}
diff --git a/internal/runtime/stubs.go b/internal/runtime/stubs.go
index ed8b9a8d..0b81dc67 100644
--- a/internal/runtime/stubs.go
+++ b/internal/runtime/stubs.go
@@ -36,3 +36,10 @@ func fastrand() uint32 {
 	return s0 + s1
 }
 */
+
+const (
+	// _64bit = 1 on 64-bit systems, 0 on 32-bit systems
+	_64bit       = 1 << (^uintptr(0) >> 63) / 2
+	heapAddrBits = (_64bit)*48 + (1-_64bit)*(32)
+	maxAlloc     = (1 << heapAddrBits) - (1-_64bit)*1
+)
diff --git a/internal/runtime/z_slice.go b/internal/runtime/z_slice.go
index 1d2dcc4a..6eed5167 100644
--- a/internal/runtime/z_slice.go
+++ b/internal/runtime/z_slice.go
@@ -20,6 +20,7 @@ import (
 	"unsafe"
 
 	"github.com/goplus/llgo/c"
+	"github.com/goplus/llgo/internal/runtime/math"
 )
 
 // -----------------------------------------------------------------------------
@@ -120,32 +121,10 @@ func SliceCopy(dst Slice, data unsafe.Pointer, num int, etSize int) int {
 	return n
 }
 
-const (
-	// _64bit = 1 on 64-bit systems, 0 on 32-bit systems
-	_64bit       = 1 << (^uintptr(0) >> 63) / 2
-	heapAddrBits = (_64bit)*48 + (1-_64bit)*(32)
-	maxAlloc     = (1 << heapAddrBits) - (1-_64bit)*1
-)
-
-const (
-	PtrSize    = 4 << (^uintptr(0) >> 63)
-	MaxUintptr = ^uintptr(0)
-)
-
-// MulUintptr returns a * b and whether the multiplication overflowed.
-// On supported platforms this is an intrinsic lowered by the compiler.
-func MulUintptr(a, b uintptr) (uintptr, bool) {
-	if a|b < 1<<(4*PtrSize) || a == 0 {
-		return a * b, false
-	}
-	overflow := b > MaxUintptr/a
-	return a * b, overflow
-}
-
 func MakeSlice(len, cap int, etSize int) Slice {
-	mem, overflow := MulUintptr(uintptr(etSize), uintptr(cap))
+	mem, overflow := math.MulUintptr(uintptr(etSize), uintptr(cap))
 	if overflow || mem > maxAlloc || len < 0 || len > cap {
-		mem, overflow := MulUintptr(uintptr(etSize), uintptr(len))
+		mem, overflow := math.MulUintptr(uintptr(etSize), uintptr(len))
 		if overflow || mem > maxAlloc || len < 0 {
 			panicmakeslicelen()
 		}
diff --git a/ssa/datastruct.go b/ssa/datastruct.go
index 97e6dd1d..9ce447fa 100644
--- a/ssa/datastruct.go
+++ b/ssa/datastruct.go
@@ -417,11 +417,7 @@ func (b Builder) MakeSlice(t Type, len, cap Expr) (ret Expr) {
 	}
 	prog := b.Prog
 	len = b.fitIntSize(len)
-	if cap.IsNil() {
-		cap = len
-	} else {
-		cap = b.fitIntSize(cap)
-	}
+	cap = b.fitIntSize(cap)
 	telem := prog.Index(t)
 	ret = b.InlineCall(b.Pkg.rtFunc("MakeSlice"), len, cap, prog.IntVal(prog.SizeOf(telem), prog.Int()))
 	ret.Type = t
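
A quick illustration of the helper this patch relocates (not part of the diff): internal/runtime/math sits under internal/ and cannot be imported from outside the module, so the sketch below inlines an equivalent copy of MulUintptr purely to show the fast path and the overflow check. The names mulUintptr, ptrSize, maxUintptr and the example sizes are local illustrative choices, not llgo API.

package main

import "fmt"

// Local copies for illustration only; the patch defines the real versions in
// internal/runtime/goarch (PtrSize) and internal/runtime/math (MaxUintptr, MulUintptr).
const (
	ptrSize    = 4 << (^uintptr(0) >> 63) // 8 on 64-bit targets, 4 on 32-bit
	maxUintptr = ^uintptr(0)
)

// mulUintptr mirrors math.MulUintptr from the patch: if both operands fit in
// half the pointer width (or a is zero), the product cannot overflow;
// otherwise compare b against maxUintptr/a.
func mulUintptr(a, b uintptr) (uintptr, bool) {
	if a|b < 1<<(4*ptrSize) || a == 0 {
		return a * b, false
	}
	return a * b, b > maxUintptr/a
}

func main() {
	// Typical slice request: 1<<20 elements of 8 bytes each.
	mem, overflow := mulUintptr(8, 1<<20)
	fmt.Println(mem, overflow) // 8388608 false

	// Oversized request: the product exceeds uintptr on a 64-bit target.
	_, overflow = mulUintptr(uintptr(1)<<32, uintptr(1)<<33)
	fmt.Println(overflow) // true on 64-bit
}

In the patched MakeSlice above, a true flag or a product above maxAlloc triggers the re-check against len shown in the z_slice.go hunk.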