[dev.boringcrypto] crypto/hmac: merge up to 2a206c7 and skip test

TestNonUniqueHash will not work on boringcrypto because the hash.Hash that sha256 provides is noncomparable.

Change-Id: Ie3dc2d5d775953c381674e22272cb3433daa1b31

commit 95ceba18d3
598 changed files with 26193 additions and 24792 deletions
@@ -806,10 +806,9 @@ tracker will automatically mark the issue as fixed.
<p>
If the change is a partial step towards the resolution of the issue,
uses the notation "Updates #12345".
This will leave a comment in the issue
linking back to the change in Gerrit, but it will not close the issue
when the change is applied.
write "Updates #12345" instead.
This will leave a comment in the issue linking back to the change in
Gerrit, but it will not close the issue when the change is applied.
</p>

<p>

@@ -454,6 +454,8 @@ environmental variable is set accordingly.</p>
<li>GODEBUG=gctrace=1 prints garbage collector events at
each collection, summarizing the amount of memory collected
and the length of the pause.</li>
<li>GODEBUG=inittrace=1 prints a summary of execution time and memory allocation
information for completed package initialization work.</li>
<li>GODEBUG=schedtrace=X prints scheduling events every X milliseconds.</li>
</ul>
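
The effect of the new inittrace setting is easiest to see by running it once. A minimal sketch (file name hypothetical; the runtime prints roughly one line per completed package init, of the form "init <pkg> @<start> ms, <clock> ms clock, <bytes> bytes, <allocs> allocs"):

	// inittrace.go: run with GODEBUG=inittrace=1 go run inittrace.go
	package main

	import "fmt"

	// table is built during package initialization, so its cost shows up
	// as a line of inittrace output attributed to package main.
	var table = buildTable()

	func buildTable() map[int]int {
		m := make(map[int]int)
		for i := 0; i < 1024; i++ {
			m[i] = i * i
		}
		return m
	}

	func main() {
		fmt.Println(len(table))
	}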
119 doc/go1.16.html

@@ -31,8 +31,22 @@ Do not send CLs removing the interior tags from such phrases.
<h2 id="ports">Ports</h2>

<p>
TODO
<h3 id="netbsd">NetBSD</h3>

<p><!-- golang.org/issue/30824 -->
Go now supports the 64-bit ARM architecture on NetBSD (the
<code>netbsd/arm64</code> port).
</p>

<h3 id="386">386</h3>

<p><!-- golang.org/issue/40255, golang.org/issue/41848, CL 258957, and CL 260017 -->
As <a href="go1.15#386">announced</a> in the Go 1.15 release notes,
Go 1.16 drops support for x87 mode compilation (<code>GO386=387</code>).
Support for non-SSE2 processors is now available using soft float
mode (<code>GO386=softfloat</code>).
Users running on non-SSE2 processors should replace <code>GO386=387</code>
with <code>GO386=softfloat</code>.
</p>

<h2 id="tools">Tools</h2>

@@ -85,6 +99,17 @@ Do not send CLs removing the interior tags from such phrases.
that is still considered to be a passing test.
</p>

<p><!-- golang.org/issue/37519 -->
The <code>go</code> <code>get</code> <code>-insecure</code> flag is
deprecated and will be removed in a future version. This flag permits
fetching from repositories and resolving custom domains using insecure
schemes such as HTTP, and also bypasses module sum validation using the
checksum database. To permit the use of insecure schemes, use the
<code>GOINSECURE</code> environment variable instead. To bypass module
sum validation, use <code>GOPRIVATE</code> or <code>GONOSUMDB</code>.
See <code>go</code> <code>help</code> <code>environment</code> for details.
</p>

<h4 id="all-pattern">The <code>all</code> pattern</h4>

<p><!-- golang.org/cl/240623 -->
@@ -148,12 +173,49 @@ Do not send CLs removing the interior tags from such phrases.
TODO: update with final numbers later in the release.
</p>

<!-- CL 255259: https://golang.org/cl/255259: cmd/link: enable ASLR on windows binaries built with -buildmode=c-shared -->

<h2 id="library">Core library</h2>

<p>
TODO
</p>

<h3 id="crypto/hmac"><a href="/pkg/crypto/hmac">crypto/hmac</a></h3>

<p><!-- CL 261960 -->
<a href="/pkg/crypto/hmac/#New">New</a> will now panic if separate calls to
the hash generation function fail to return new values. Previously, the
behavior was undefined and invalid outputs were sometimes generated.
</p>
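
For context, New takes a constructor function rather than a hash.Hash value; it invokes the constructor more than once and, with this change, panics if the calls do not return distinct values. A minimal sketch of correct usage:

	package main

	import (
		"crypto/hmac"
		"crypto/sha256"
		"encoding/hex"
		"fmt"
	)

	func main() {
		// Pass the constructor itself, not the result of calling it once;
		// hmac.New calls it several times internally.
		mac := hmac.New(sha256.New, []byte("demo key"))
		mac.Write([]byte("hello"))
		fmt.Println(hex.EncodeToString(mac.Sum(nil)))
	}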
<h3 id="crypto/tls"><a href="/pkg/crypto/tls">crypto/tls</a></h3>

<p><!-- CL 256897 -->
I/O operations on closing or closed TLS connections can now be detected using
the new <a href="/pkg/net/#ErrClosed">ErrClosed</a> error. A typical use
would be <code>errors.Is(err, net.ErrClosed)</code>. In earlier releases
the only way to reliably detect this case was to match the string returned
by the <code>Error</code> method with <code>"tls: use of closed connection"</code>.
</p>
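
A short sketch of the new error check (the dial target is a placeholder):

	package main

	import (
		"crypto/tls"
		"errors"
		"log"
		"net"
	)

	func main() {
		conn, err := tls.Dial("tcp", "example.com:443", &tls.Config{})
		if err != nil {
			log.Fatal(err)
		}
		conn.Close()
		// Operations on the closed connection now return an error that
		// matches net.ErrClosed, so no string comparison is needed.
		if _, err := conn.Read(make([]byte, 1)); errors.Is(err, net.ErrClosed) {
			log.Print("connection already closed")
		}
	}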
<h3 id="crypto/x509"><a href="/pkg/crypto/x509">crypto/x509</a></h3>
|
||||
|
||||
<p><!-- CL 235078 -->
|
||||
<a href="/pkg/crypto/x509/#ParseCertificate">ParseCertificate</a> and
|
||||
<a href="/pkg/crypto/x509/#CreateCertificate">CreateCertificate</a> both
|
||||
now enforce string encoding restrictions for the fields <code>DNSNames</code>,
|
||||
<code>EmailAddresses</code>, and <code>URIs</code>. These fields can only
|
||||
contain strings with characters within the ASCII range.
|
||||
</p>
|
||||
|
||||
<p><!-- CL 259697 -->
|
||||
<a href="/pkg/crypto/x509/#CreateCertificate">CreateCertificate</a> now
|
||||
verifies the generated certificate's signature using the signer's
|
||||
public key. If the signature is invalid, an error is returned, instead
|
||||
of a malformed certificate.
|
||||
</p>
|
||||
|
||||
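
Both restrictions are visible in a small self-signed sketch; the IDNA (ASCII) host name below is an illustrative value, not part of the change:

	package main

	import (
		"crypto/ecdsa"
		"crypto/elliptic"
		"crypto/rand"
		"crypto/x509"
		"crypto/x509/pkix"
		"fmt"
		"math/big"
		"time"
	)

	func main() {
		key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
		tmpl := &x509.Certificate{
			SerialNumber: big.NewInt(1),
			Subject:      pkix.Name{CommonName: "demo"},
			NotBefore:    time.Now(),
			NotAfter:     time.Now().Add(time.Hour),
			// Non-ASCII names are rejected; IDNA-encode them first,
			// e.g. "bücher.example" becomes "xn--bcher-kva.example".
			DNSNames: []string{"xn--bcher-kva.example"},
		}
		// CreateCertificate also verifies the resulting signature with
		// the signer's public key before returning the DER bytes.
		der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
		fmt.Println(len(der), err)
	}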
<h3 id="net"><a href="/pkg/net/">net</a></h3>
|
||||
|
||||
<p><!-- CL 250357 -->
|
||||
|
|
@ -166,6 +228,14 @@ Do not send CLs removing the interior tags from such phrases.
|
|||
with <code>"use of closed network connection"</code>.
|
||||
</p>
|
||||
|
||||
<h3 id="reflect"><a href="/pkg/reflect/">reflect</a></h3>
|
||||
|
||||
<p><!-- CL 259237, golang.org/issue/22075 -->
|
||||
For interface types and values, <a href="/pkg/reflect/#Value.Method">Method</a>,
|
||||
<a href="/pkg/reflect/#Value.MethodByName">MethodByName</a>, and
|
||||
<a href="/pkg/reflect/#Value.NumMethod">NumMethod</a> now
|
||||
operate on the interface's exported method set, rather than its full method set.
|
||||
</p>
|
||||
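
The practical effect on interface introspection, as a sketch (types hypothetical):

	package main

	import (
		"fmt"
		"reflect"
	)

	type greeter interface {
		Hello() string
		goodbye() // unexported
	}

	func main() {
		t := reflect.TypeOf((*greeter)(nil)).Elem()
		// Go 1.16 reports only the exported method set: 1, not 2.
		fmt.Println(t.NumMethod())
	}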
<h3 id="text/template/parse"><a href="/pkg/text/template/parse/">text/template/parse</a></h3>

@@ -199,6 +269,25 @@ Do not send CLs removing the interior tags from such phrases.
TODO
</p>

<dl id="crypto/dsa"><dt><a href="/pkg/crypto/dsa/">crypto/dsa</a></dt>
<dd>
<p><!-- CL 257939 -->
The <a href="/pkg/crypto/dsa/"><code>crypto/dsa</code></a> package is now deprecated.
See <a href="https://golang.org/issue/40337">issue #40337</a>.
</p>
</dd>
</dl><!-- crypto/dsa -->

<dl id="crypto/x509"><dt><a href="/pkg/crypto/x509/">crypto/x509</a></dt>
<dd>
<p><!-- CL 257939 -->
DSA signature verification is no longer supported. Note that DSA signature
generation was never supported.
See <a href="https://golang.org/issue/40337">issue #40337</a>.
</p>
</dd>
</dl><!-- crypto/x509 -->

<dl id="net/http"><dt><a href="/pkg/net/http/">net/http</a></dt>
<dd>
<p><!-- CL 233637 -->
@@ -220,5 +309,31 @@ Do not send CLs removing the interior tags from such phrases.
of the form <code>"Range": "bytes=--N"</code> where <code>"-N"</code> is a negative suffix length, for
example <code>"Range": "bytes=--2"</code>. It now replies with a <code>416 "Range Not Satisfiable"</code> response.
</p>

<p><!-- CL 256498, golang.org/issue/36990 -->
Cookies set with <code>SameSiteDefaultMode</code> now behave according to the current
spec (no attribute is set) instead of generating a SameSite key without a value.
</p>
</dd>
</dl><!-- net/http -->
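
The cookie change can be observed directly in the emitted header; a sketch using httptest:

	package main

	import (
		"fmt"
		"net/http"
		"net/http/httptest"
	)

	func main() {
		rec := httptest.NewRecorder()
		http.SetCookie(rec, &http.Cookie{
			Name:     "session",
			Value:    "abc123",
			SameSite: http.SameSiteDefaultMode,
		})
		// The header now omits the SameSite attribute entirely instead
		// of emitting a bare "SameSite" key with no value.
		fmt.Println(rec.Header().Get("Set-Cookie"))
	}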
<dl id="runtime/debug"><dt><a href="/pkg/runtime/debug/">runtime/debug</a></dt>
|
||||
<dd>
|
||||
<p><!-- CL 249677 -->
|
||||
TODO: <a href="https://golang.org/cl/249677">https://golang.org/cl/249677</a>: provide Addr method for errors from SetPanicOnFault
|
||||
</p>
|
||||
</dd>
|
||||
</dl><!-- runtime/debug -->
|
||||
|
||||
<dl id="strconv"><dt><a href="/pkg/strconv/">strconv</a></dt>
|
||||
<dd>
|
||||
<p><!-- CL 260858 -->
|
||||
<a href="/pkg/strconv/#ParseFloat"><code>ParseFloat</code></a> now uses
|
||||
the <a
|
||||
href="https://nigeltao.github.io/blog/2020/eisel-lemire.html">Eisel-Lemire
|
||||
algorithm</a>, improving performance by up to a factor of 2. This can
|
||||
also speed up decoding textual formats like <a
|
||||
href="/pkg/encoding/json/"><code>encoding/json</code></a>.
|
||||
</p>
|
||||
</dd>
|
||||
</dl><!-- strconv -->
|
||||
|
|
|
|||
|
|
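
The ParseFloat API and results are unchanged, so existing callers pick up the speedup automatically; a trivial sketch:

	package main

	import (
		"fmt"
		"strconv"
	)

	func main() {
		// Same signature and rounding behavior as before; only the
		// internal parsing algorithm changed.
		f, err := strconv.ParseFloat("3.14159265358979", 64)
		fmt.Println(f, err)
	}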
@@ -1,6 +1,6 @@
<!--{
"Title": "The Go Programming Language Specification",
"Subtitle": "Version of Jan 14, 2020",
"Subtitle": "Version of Sep 24, 2020",
"Path": "/ref/spec"
}-->

@@ -3646,7 +3646,7 @@ For instance, <code>x / y * z</code> is the same as <code>(x / y) * z</code>.
x <= f()
^a >> b
f() || g()
x == y+1 && <-chanPtr > 0
x == y+1 && <-chanInt > 0
</pre>

@@ -666,16 +666,13 @@ For example, you should not set <code>$GOHOSTARCH</code> to
<code>arm</code> on an x86 system.
</p>

<li><code>$GO386</code> (for <code>386</code> only, default is auto-detected
if built on either <code>386</code> or <code>amd64</code>, <code>387</code> otherwise)
<li><code>$GO386</code> (for <code>386</code> only, defaults to <code>sse2</code>)
<p>
This controls the code generated by gc to use either the 387 floating-point unit
(set to <code>387</code>) or SSE2 instructions (set to <code>sse2</code>) for
floating point computations.
This variable controls how gc implements floating point computations.
</p>
<ul>
<li><code>GO386=387</code>: use x87 for floating point operations; should support all x86 chips (Pentium MMX or later).</li>
<li><code>GO386=sse2</code>: use SSE2 for floating point operations; has better performance than 387, but only available on Pentium 4/Opteron/Athlon 64 or later.</li>
<li><code>GO386=softfloat</code>: use software floating point operations; should support all x86 chips (Pentium MMX or later).</li>
<li><code>GO386=sse2</code>: use SSE2 for floating point operations; has better performance but only available on Pentium 4/Opteron/Athlon 64 or later.</li>
</ul>
</li>

@@ -8,8 +8,8 @@
# Consult https://www.iana.org/time-zones for the latest versions.

# Versions to use.
CODE=2020a
DATA=2020a
CODE=2020b
DATA=2020b

set -e
rm -rf work
Binary file not shown.
@@ -3,6 +3,7 @@
// license that can be found in the LICENSE file.

// +build !windows,!static
// +build !darwin !internal_pie

#include <stdint.h>
#include <dlfcn.h>

@@ -3,6 +3,10 @@
// license that can be found in the LICENSE file.

// +build !windows,!static
// +build !darwin !internal_pie

// Excluded in darwin internal linking PIE mode, as dynamic export is not
// supported.

package cgotest

@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build windows static
// +build windows static darwin,internal_pie

package cgotest

@@ -62,7 +62,7 @@ import (

func testSigaltstack(t *testing.T) {
	switch {
	case runtime.GOOS == "solaris", runtime.GOOS == "illumos", (runtime.GOOS == "darwin" || runtime.GOOS == "ios") && runtime.GOARCH == "arm64":
	case runtime.GOOS == "solaris", runtime.GOOS == "illumos", runtime.GOOS == "ios" && runtime.GOARCH == "arm64":
		t.Skipf("switching signal stack not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
	}

20 misc/cgo/test/testdata/issue41761.go (vendored, new file)

@@ -0,0 +1,20 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cgotest

/*
typedef struct S S;
*/
import "C"

import (
	"cgotest/issue41761a"
	"testing"
)

func test41761(t *testing.T) {
	var x issue41761a.T
	_ = (*C.struct_S)(x.X)
}

14 misc/cgo/test/testdata/issue41761a/a.go (vendored, new file)

@@ -0,0 +1,14 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package issue41761a

/*
typedef struct S S;
*/
import "C"

type T struct {
	X *C.S
}
@@ -603,7 +603,7 @@ func TestExtar(t *testing.T) {
	if runtime.Compiler == "gccgo" {
		t.Skip("skipping -extar test when using gccgo")
	}
	if (runtime.GOOS == "darwin" || runtime.GOOS == "ios") && runtime.GOARCH == "arm64" {
	if runtime.GOOS == "ios" {
		t.Skip("shell scripts are not executable on iOS hosts")
	}
@@ -1,13 +1,20 @@
Go on iOS
=========

For details on developing Go for iOS on macOS, see the documentation in the mobile
subrepository:
To run the standard library tests, run all.bash as usual, but with the compiler
set to the clang wrapper that invokes clang for iOS. For example, this command runs
all.bash on the iOS emulator:

	https://github.com/golang/mobile
	GOOS=ios GOARCH=amd64 CGO_ENABLED=1 CC_FOR_TARGET=$(pwd)/../misc/ios/clangwrap.sh ./all.bash

It is necessary to set up the environment before running tests or programs directly on a
device.
To use the go tool to run individual programs and tests, put $GOROOT/bin into PATH to ensure
the go_ios_$GOARCH_exec wrapper is found. For example, to run the archive/tar tests:

	export PATH=$GOROOT/bin:$PATH
	GOOS=ios GOARCH=amd64 CGO_ENABLED=1 go test archive/tar

The go_ios_exec wrapper uses GOARCH to select the emulator (amd64) or the device (arm64).
However, further setup is required to run tests or programs directly on a device.

First make sure you have a valid developer certificate and have set up your device properly
to run apps signed by your developer certificate. Then install the libimobiledevice and

@@ -29,18 +36,10 @@ which will output something similar to
export GOIOS_TEAM_ID=ZZZZZZZZ

If you have multiple devices connected, specify the device UDID with the GOIOS_DEVICE_ID
variable. Use `idevice_id -l` to list all available UDIDs.
variable. Use `idevice_id -l` to list all available UDIDs. Then, setting GOARCH to arm64
will select the device:

Finally, to run the standard library tests, run all.bash as usual, but with the compiler
set to the clang wrapper that invokes clang for iOS. For example,

	GOARCH=arm64 CGO_ENABLED=1 CC_FOR_TARGET=$(pwd)/../misc/ios/clangwrap.sh ./all.bash

To use the go tool directly to run programs and tests, put $GOROOT/bin into PATH to ensure
the go_darwin_$GOARCH_exec wrapper is found. For example, to run the archive/tar tests

	export PATH=$GOROOT/bin:$PATH
	GOARCH=arm64 CGO_ENABLED=1 go test archive/tar
	GOOS=ios GOARCH=arm64 CGO_ENABLED=1 CC_FOR_TARGET=$(pwd)/../misc/ios/clangwrap.sh ./all.bash

Note that the go_darwin_$GOARCH_exec wrapper uninstalls any existing app identified by
the bundle id before installing a new app. If the uninstalled app is the last app by

@@ -2,17 +2,19 @@
# This uses the latest available iOS SDK, which is recommended.
# To select a specific SDK, run 'xcodebuild -showsdks'
# to see the available SDKs and replace iphoneos with one of them.
SDK=iphoneos
if [ "$GOARCH" == "arm64" ]; then
	SDK=iphoneos
	PLATFORM=ios
	CLANGARCH="arm64"
else
	SDK=iphonesimulator
	PLATFORM=ios-simulator
	CLANGARCH="x86_64"
fi

SDK_PATH=`xcrun --sdk $SDK --show-sdk-path`
export IPHONEOS_DEPLOYMENT_TARGET=5.1
# cmd/cgo doesn't support llvm-gcc-4.2, so we have to use clang.
CLANG=`xcrun --sdk $SDK --find clang`

if [ "$GOARCH" == "arm64" ]; then
	CLANGARCH="arm64"
else
	echo "unknown GOARCH=$GOARCH" >&2
	exit 1
fi

exec "$CLANG" -arch $CLANGARCH -isysroot "$SDK_PATH" -mios-version-min=10.0 "$@"
exec "$CLANG" -arch $CLANGARCH -isysroot "$SDK_PATH" -m${PLATFORM}-version-min=10.0 "$@"
@@ -6,7 +6,7 @@

// detect attempts to autodetect the correct
// values of the environment variables
// used by go_darwin_arm_exec.
// used by go_ios_exec.
// detect shells out to ideviceinfo, a third party program that can
// be obtained by following the instructions at
// https://github.com/libimobiledevice/libimobiledevice.

@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This program can be used as go_darwin_arm_exec by the Go tool.
// This program can be used as go_ios_$GOARCH_exec by the Go tool.
// It executes binaries on an iOS device using the XCode toolchain
// and the ios-deploy program: https://github.com/phonegap/ios-deploy
//

@@ -34,6 +34,7 @@ import (
	"os/signal"
	"path/filepath"
	"runtime"
	"strconv"
	"strings"
	"syscall"
	"time"

@@ -58,34 +59,16 @@ var lock *os.File

func main() {
	log.SetFlags(0)
	log.SetPrefix("go_darwin_arm_exec: ")
	log.SetPrefix("go_ios_exec: ")
	if debug {
		log.Println(strings.Join(os.Args, " "))
	}
	if len(os.Args) < 2 {
		log.Fatal("usage: go_darwin_arm_exec a.out")
		log.Fatal("usage: go_ios_exec a.out")
	}

	// e.g. B393DDEB490947F5A463FD074299B6C0AXXXXXXX
	devID = getenv("GOIOS_DEV_ID")

	// e.g. Z8B3JBXXXX.org.golang.sample, Z8B3JBXXXX prefix is available at
	// https://developer.apple.com/membercenter/index.action#accountSummary as Team ID.
	appID = getenv("GOIOS_APP_ID")

	// e.g. Z8B3JBXXXX, available at
	// https://developer.apple.com/membercenter/index.action#accountSummary as Team ID.
	teamID = getenv("GOIOS_TEAM_ID")

	// Device IDs as listed with ios-deploy -c.
	deviceID = os.Getenv("GOIOS_DEVICE_ID")

	parts := strings.SplitN(appID, ".", 2)
	// For compatibility with the old builders, use a fallback bundle ID
	bundleID = "golang.gotest"
	if len(parts) == 2 {
		bundleID = parts[1]
	}

	exitCode, err := runMain()
	if err != nil {

@@ -96,7 +79,7 @@ func main() {

func runMain() (int, error) {
	var err error
	tmpdir, err = ioutil.TempDir("", "go_darwin_arm_exec_")
	tmpdir, err = ioutil.TempDir("", "go_ios_exec_")
	if err != nil {
		return 1, err
	}

@@ -117,7 +100,7 @@ func runMain() (int, error) {
	//
	// The lock file is never deleted, to avoid concurrent locks on distinct
	// files with the same path.
	lockName := filepath.Join(os.TempDir(), "go_darwin_arm_exec-"+deviceID+".lock")
	lockName := filepath.Join(os.TempDir(), "go_ios_exec-"+deviceID+".lock")
	lock, err = os.OpenFile(lockName, os.O_CREATE|os.O_RDONLY, 0666)
	if err != nil {
		return 1, err

@@ -126,28 +109,12 @@ func runMain() (int, error) {
		return 1, err
	}

	if err := uninstall(bundleID); err != nil {
		return 1, err
	if goarch := os.Getenv("GOARCH"); goarch == "arm64" {
		err = runOnDevice(appdir)
	} else {
		err = runOnSimulator(appdir)
	}

	if err := install(appdir); err != nil {
		return 1, err
	}

	if err := mountDevImage(); err != nil {
		return 1, err
	}

	// Kill any hanging debug bridges that might take up port 3222.
	exec.Command("killall", "idevicedebugserverproxy").Run()

	closer, err := startDebugBridge()
	if err != nil {
		return 1, err
	}
	defer closer()

	if err := run(appdir, bundleID, os.Args[2:]); err != nil {
		// If the lldb driver completed with an exit code, use that.
		if err, ok := err.(*exec.ExitError); ok {
			if ws, ok := err.Sys().(interface{ ExitStatus() int }); ok {

@@ -159,6 +126,62 @@ func runMain() (int, error) {
	return 0, nil
}

func runOnSimulator(appdir string) error {
	if err := installSimulator(appdir); err != nil {
		return err
	}

	return runSimulator(appdir, bundleID, os.Args[2:])
}

func runOnDevice(appdir string) error {
	// e.g. B393DDEB490947F5A463FD074299B6C0AXXXXXXX
	devID = getenv("GOIOS_DEV_ID")

	// e.g. Z8B3JBXXXX.org.golang.sample, Z8B3JBXXXX prefix is available at
	// https://developer.apple.com/membercenter/index.action#accountSummary as Team ID.
	appID = getenv("GOIOS_APP_ID")

	// e.g. Z8B3JBXXXX, available at
	// https://developer.apple.com/membercenter/index.action#accountSummary as Team ID.
	teamID = getenv("GOIOS_TEAM_ID")

	// Device IDs as listed with ios-deploy -c.
	deviceID = os.Getenv("GOIOS_DEVICE_ID")

	parts := strings.SplitN(appID, ".", 2)
	if len(parts) == 2 {
		bundleID = parts[1]
	}

	if err := signApp(appdir); err != nil {
		return err
	}

	if err := uninstallDevice(bundleID); err != nil {
		return err
	}

	if err := installDevice(appdir); err != nil {
		return err
	}

	if err := mountDevImage(); err != nil {
		return err
	}

	// Kill any hanging debug bridges that might take up port 3222.
	exec.Command("killall", "idevicedebugserverproxy").Run()

	closer, err := startDebugBridge()
	if err != nil {
		return err
	}
	defer closer()

	return runDevice(appdir, bundleID, os.Args[2:])
}

func getenv(envvar string) string {
	s := os.Getenv(envvar)
	if s == "" {

@@ -191,7 +214,11 @@ func assembleApp(appdir, bin string) error {
	if err := ioutil.WriteFile(filepath.Join(appdir, "ResourceRules.plist"), []byte(resourceRules), 0744); err != nil {
		return err
	}
	return nil
}

func signApp(appdir string) error {
	entitlementsPath := filepath.Join(tmpdir, "Entitlements.plist")
	cmd := exec.Command(
		"codesign",
		"-f",

@@ -421,7 +448,20 @@ func parsePlistDict(dict []byte) (map[string]string, error) {
	return values, nil
}

func uninstall(bundleID string) error {
func installSimulator(appdir string) error {
	cmd := exec.Command(
		"xcrun", "simctl", "install",
		"booted", // Install to the booted simulator.
		appdir,
	)
	if out, err := cmd.CombinedOutput(); err != nil {
		os.Stderr.Write(out)
		return fmt.Errorf("xcrun simctl install booted %q: %v", appdir, err)
	}
	return nil
}

func uninstallDevice(bundleID string) error {
	cmd := idevCmd(exec.Command(
		"ideviceinstaller",
		"-U", bundleID,

@@ -433,7 +473,7 @@ func uninstall(bundleID string) error {
	return nil
}

func install(appdir string) error {
func installDevice(appdir string) error {
	attempt := 0
	for {
		cmd := idevCmd(exec.Command(

@@ -464,15 +504,28 @@ func idevCmd(cmd *exec.Cmd) *exec.Cmd {
	return cmd
}

func run(appdir, bundleID string, args []string) error {
	var env []string
	for _, e := range os.Environ() {
		// Don't override TMPDIR, HOME, GOCACHE on the device.
		if strings.HasPrefix(e, "TMPDIR=") || strings.HasPrefix(e, "HOME=") || strings.HasPrefix(e, "GOCACHE=") {
			continue
		}
		env = append(env, e)
func runSimulator(appdir, bundleID string, args []string) error {
	cmd := exec.Command(
		"xcrun", "simctl", "launch",
		"--wait-for-debugger",
		"booted",
		bundleID,
	)
	out, err := cmd.CombinedOutput()
	if err != nil {
		os.Stderr.Write(out)
		return fmt.Errorf("xcrun simctl launch booted %q: %v", bundleID, err)
	}
	var processID int
	var ignore string
	if _, err := fmt.Sscanf(string(out), "%s %d", &ignore, &processID); err != nil {
		return fmt.Errorf("runSimulator: couldn't find processID from `simctl launch`: %v (%q)", err, out)
	}
	_, err = runLLDB("ios-simulator", appdir, strconv.Itoa(processID), args)
	return err
}

func runDevice(appdir, bundleID string, args []string) error {
	attempt := 0
	for {
		// The device app path reported by the device might be stale, so retry

@@ -487,37 +540,10 @@ func run(appdir, bundleID string, args []string) error {
			time.Sleep(5 * time.Second)
			continue
		}
		lldb := exec.Command(
			"python",
			"-", // Read script from stdin.
			appdir,
			deviceapp,
		)
		lldb.Args = append(lldb.Args, args...)
		lldb.Env = env
		lldb.Stdin = strings.NewReader(lldbDriver)
		lldb.Stdout = os.Stdout
		var out bytes.Buffer
		lldb.Stderr = io.MultiWriter(&out, os.Stderr)
		err = lldb.Start()
		if err == nil {
			// Forward SIGQUIT to the lldb driver which in turn will forward
			// to the running program.
			sigs := make(chan os.Signal, 1)
			signal.Notify(sigs, syscall.SIGQUIT)
			proc := lldb.Process
			go func() {
				for sig := range sigs {
					proc.Signal(sig)
				}
			}()
			err = lldb.Wait()
			signal.Stop(sigs)
			close(sigs)
		}
		out, err := runLLDB("remote-ios", appdir, deviceapp, args)
		// If the program was not started it can be retried without papering over
		// real test failures.
		started := bytes.HasPrefix(out.Bytes(), []byte("lldb: running program"))
		started := bytes.HasPrefix(out, []byte("lldb: running program"))
		if started || err == nil || attempt == 5 {
			return err
		}

@@ -528,6 +554,47 @@ func run(appdir, bundleID string, args []string) error {
	}
}

func runLLDB(target, appdir, deviceapp string, args []string) ([]byte, error) {
	var env []string
	for _, e := range os.Environ() {
		// Don't override TMPDIR, HOME, GOCACHE on the device.
		if strings.HasPrefix(e, "TMPDIR=") || strings.HasPrefix(e, "HOME=") || strings.HasPrefix(e, "GOCACHE=") {
			continue
		}
		env = append(env, e)
	}
	lldb := exec.Command(
		"python",
		"-", // Read script from stdin.
		target,
		appdir,
		deviceapp,
	)
	lldb.Args = append(lldb.Args, args...)
	lldb.Env = env
	lldb.Stdin = strings.NewReader(lldbDriver)
	lldb.Stdout = os.Stdout
	var out bytes.Buffer
	lldb.Stderr = io.MultiWriter(&out, os.Stderr)
	err := lldb.Start()
	if err == nil {
		// Forward SIGQUIT to the lldb driver which in turn will forward
		// to the running program.
		sigs := make(chan os.Signal, 1)
		signal.Notify(sigs, syscall.SIGQUIT)
		proc := lldb.Process
		go func() {
			for sig := range sigs {
				proc.Signal(sig)
			}
		}()
		err = lldb.Wait()
		signal.Stop(sigs)
		close(sigs)
	}
	return out.Bytes(), err
}

func copyLocalDir(dst, src string) error {
	if err := os.Mkdir(dst, 0755); err != nil {
		return err

@@ -679,6 +746,7 @@ func infoPlist(pkgpath string) string {
<key>CFBundleSupportedPlatforms</key><array><string>iPhoneOS</string></array>
<key>CFBundleExecutable</key><string>gotest</string>
<key>CFBundleVersion</key><string>1.0</string>
<key>CFBundleShortVersionString</key><string>1.0</string>
<key>CFBundleIdentifier</key><string>` + bundleID + `</string>
<key>CFBundleResourceSpecification</key><string>ResourceRules.plist</string>
<key>LSRequiresIPhoneOS</key><true/>

@@ -739,7 +807,7 @@ import sys
import os
import signal

exe, device_exe, args = sys.argv[1], sys.argv[2], sys.argv[3:]
platform, exe, device_exe_or_pid, args = sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4:]

env = []
for k, v in os.environ.items():

@@ -754,17 +822,21 @@ debugger.SetAsync(True)
debugger.SkipLLDBInitFiles(True)

err = lldb.SBError()
target = debugger.CreateTarget(exe, None, 'remote-ios', True, err)
target = debugger.CreateTarget(exe, None, platform, True, err)
if not target.IsValid() or not err.Success():
    sys.stderr.write("lldb: failed to setup up target: %s\n" % (err))
    sys.exit(1)

target.modules[0].SetPlatformFileSpec(lldb.SBFileSpec(device_exe))

listener = debugger.GetListener()
process = target.ConnectRemote(listener, 'connect://localhost:3222', None, err)

if platform == 'remote-ios':
    target.modules[0].SetPlatformFileSpec(lldb.SBFileSpec(device_exe_or_pid))
    process = target.ConnectRemote(listener, 'connect://localhost:3222', None, err)
else:
    process = target.AttachToProcessWithID(listener, int(device_exe_or_pid), err)

if not err.Success():
    sys.stderr.write("lldb: failed to connect to remote target: %s\n" % (err))
    sys.stderr.write("lldb: failed to connect to remote target %s: %s\n" % (device_exe_or_pid, err))
    sys.exit(1)

# Don't stop on signals.

@@ -777,6 +849,25 @@ for i in range(0, sigs.GetNumSignals()):
event = lldb.SBEvent()
running = False
prev_handler = None

def signal_handler(signal, frame):
    process.Signal(signal)

def run_program():
    # Forward SIGQUIT to the program.
    prev_handler = signal.signal(signal.SIGQUIT, signal_handler)
    # Tell the Go driver that the program is running and should not be retried.
    sys.stderr.write("lldb: running program\n")
    running = True
    # Process is stopped at attach/launch. Let it run.
    process.Continue()

if platform != 'remote-ios':
    # For the local emulator the program is ready to run.
    # For remote device runs, we need to wait for eStateConnected,
    # below.
    run_program()

while True:
    if not listener.WaitForEvent(1, event):
        continue

@@ -800,24 +891,22 @@ while True:
        signal.signal(signal.SIGQUIT, prev_handler)
        break
    elif state == lldb.eStateConnected:
        process.RemoteLaunch(args, env, None, None, None, None, 0, False, err)
        if not err.Success():
            sys.stderr.write("lldb: failed to launch remote process: %s\n" % (err))
            process.Kill()
            debugger.Terminate()
            sys.exit(1)
        # Forward SIGQUIT to the program.
        def signal_handler(signal, frame):
            process.Signal(signal)
        prev_handler = signal.signal(signal.SIGQUIT, signal_handler)
        # Tell the Go driver that the program is running and should not be retried.
        sys.stderr.write("lldb: running program\n")
        running = True
        # Process stops once at the beginning. Continue.
        process.Continue()
        if platform == 'remote-ios':
            process.RemoteLaunch(args, env, None, None, None, None, 0, False, err)
            if not err.Success():
                sys.stderr.write("lldb: failed to launch remote process: %s\n" % (err))
                process.Kill()
                debugger.Terminate()
                sys.exit(1)
        run_program()

exitStatus = process.GetExitStatus()
exitDesc = process.GetExitDescription()
process.Kill()
debugger.Terminate()
if exitStatus == 0 and exitDesc is not None:
    # Ensure tests fail when killed by a signal.
    exitStatus = 123

sys.exit(exitStatus)
`
@@ -3,10 +3,10 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Usage: buildall.sh [-e] [pattern]
# Usage: buildall.bash [-e] [pattern]
#
# buildall.bash builds the standard library for all Go-supported
# architectures. It is used by the "all-compile" trybot builder,
# architectures. It is used by the "misc-compile" trybot builders,
# as a smoke test to quickly flag portability issues.
#
# Options:

@@ -37,12 +37,11 @@ GOROOT="$(cd .. && pwd)"

gettargets() {
	../bin/go tool dist list | sed -e 's|/|-|'
	echo linux-386-387
	echo linux-arm-arm5
}

selectedtargets() {
	gettargets | egrep -v 'android-arm|darwin-arm64' | egrep "$pattern"
	gettargets | egrep "$pattern"
}

# put linux first in the target list to get all the architectures up front.

@@ -64,15 +63,11 @@ do
	echo "### Building $target"
	export GOOS=$(echo $target | sed 's/-.*//')
	export GOARCH=$(echo $target | sed 's/.*-//')
	unset GO386 GOARM
	unset GOARM
	if [ "$GOARCH" = "arm5" ]; then
		export GOARCH=arm
		export GOARM=5
	fi
	if [ "$GOARCH" = "387" ]; then
		export GOARCH=386
		export GO386=387
	fi

	# Build and vet everything.
	# cmd/go/internal/work/exec.go enables the same vet flags during go test of std cmd
@@ -227,19 +227,26 @@ func IndexAny(s []byte, chars string) int {
				continue
			}
			r, width = utf8.DecodeRune(s[i:])
			if r == utf8.RuneError {
				for _, r = range chars {
					if r == utf8.RuneError {
			if r != utf8.RuneError {
				// r is 2 to 4 bytes
				if len(chars) == width {
					if chars == string(r) {
						return i
					}
					continue
				}
				// Use bytealg.IndexString for performance if available.
				if bytealg.MaxLen >= width {
					if bytealg.IndexString(chars, string(r)) >= 0 {
						return i
					}
					continue
				}
				continue
			}
			// r is 2 to 4 bytes. Using strings.Index is more reasonable, but as the bytes
			// package should not import the strings package, use bytealg.IndexString
			// instead. And this does not seem to lose much performance.
			if chars == string(r) || bytealg.IndexString(chars, string(r)) >= 0 {
				return i
			for _, ch := range chars {
				if r == ch {
					return i
				}
			}
		}
		return -1

@@ -304,19 +311,26 @@ func LastIndexAny(s []byte, chars string) int {
			}
			r, size := utf8.DecodeLastRune(s[:i])
			i -= size
			if r == utf8.RuneError {
				for _, r = range chars {
					if r == utf8.RuneError {
			if r != utf8.RuneError {
				// r is 2 to 4 bytes
				if len(chars) == size {
					if chars == string(r) {
						return i
					}
					continue
				}
				// Use bytealg.IndexString for performance if available.
				if bytealg.MaxLen >= size {
					if bytealg.IndexString(chars, string(r)) >= 0 {
						return i
					}
					continue
				}
				continue
			}
			// r is 2 to 4 bytes. Using strings.Index is more reasonable, but as the bytes
			// package should not import the strings package, use bytealg.IndexString
			// instead. And this does not seem to lose much performance.
			if chars == string(r) || bytealg.IndexString(chars, string(r)) >= 0 {
				return i
			for _, ch := range chars {
				if r == ch {
					return i
				}
			}
		}
		return -1
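
The rewrite above is internal only; the observable behavior of IndexAny and LastIndexAny is unchanged, as this small sketch (values illustrative) shows:

	package main

	import (
		"bytes"
		"fmt"
	)

	func main() {
		s := []byte("héllo, wörld")
		// Multi-byte runes in chars still match multi-byte runes in s.
		fmt.Println(bytes.IndexAny(s, "öü"))     // byte index of 'ö'
		fmt.Println(bytes.LastIndexAny(s, "hé")) // byte index of the last 'h' or 'é'
	}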
@@ -82,6 +82,17 @@ func IsARM64STLXR(op obj.As) bool {
	return false
}

// IsARM64TBL reports whether the op (as defined by an arm64.A*
// constant) is one of the TBL-like instructions and one of its
// inputs does not fit into prog.Reg, so require special handling.
func IsARM64TBL(op obj.As) bool {
	switch op {
	case arm64.AVTBL, arm64.AVMOVQ:
		return true
	}
	return false
}

// ARM64Suffix handles the special suffix for the ARM64.
// It returns a boolean to indicate success; failure means
// cond was unrecognized.

@@ -125,13 +136,6 @@ func arm64RegisterNumber(name string, n int16) (int16, bool) {
	return 0, false
}

// IsARM64TBL reports whether the op (as defined by an arm64.A*
// constant) is one of the table lookup instructions that require special
// handling.
func IsARM64TBL(op obj.As) bool {
	return op == arm64.AVTBL
}

// ARM64RegisterExtension parses an ARM64 register with extension or arrangement.
func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
	Rnum := (reg & 31) + int16(num<<5)

@@ -181,7 +181,7 @@ func (p *Parser) asmText(operands [][]lex.Token) {
			// Argsize set below.
		},
	}
	nameAddr.Sym.Func.Text = prog
	nameAddr.Sym.Func().Text = prog
	prog.To.Val = int32(argSize)
	p.append(prog, "", true)
}

@@ -622,8 +622,9 @@ func (p *Parser) asmInstruction(op obj.As, cond string, a []obj.Addr) {
		prog.SetFrom3(a[1])
		prog.To = a[2]
	case sys.ARM64:
		// ARM64 instructions with one input and two outputs.
		if arch.IsARM64STLXR(op) {
		switch {
		case arch.IsARM64STLXR(op):
			// ARM64 instructions with one input and two outputs.
			prog.From = a[0]
			prog.To = a[1]
			if a[2].Type != obj.TYPE_REG {

@@ -631,20 +632,16 @@ func (p *Parser) asmInstruction(op obj.As, cond string, a []obj.Addr) {
				return
			}
			prog.RegTo2 = a[2].Reg
			break
		}
		if arch.IsARM64TBL(op) {
		case arch.IsARM64TBL(op):
			// one of its inputs does not fit into prog.Reg.
			prog.From = a[0]
			if a[1].Type != obj.TYPE_REGLIST {
				p.errorf("%s: expected list; found %s", op, obj.Dconv(prog, &a[1]))
			}
			prog.SetFrom3(a[1])
			prog.To = a[2]
			break
		default:
			prog.From = a[0]
			prog.Reg = p.getRegister(prog, op, &a[1])
			prog.To = a[2]
		}
		prog.From = a[0]
		prog.Reg = p.getRegister(prog, op, &a[1])
		prog.To = a[2]
	case sys.I386:
		prog.From = a[0]
		prog.SetFrom3(a[1])
@@ -31,7 +31,7 @@ func testEndToEnd(t *testing.T, goarch, file string) {
	architecture, ctxt := setArch(goarch)
	architecture.Init(ctxt)
	lexer := lex.NewLexer(input)
	parser := NewParser(ctxt, architecture, lexer)
	parser := NewParser(ctxt, architecture, lexer, false)
	pList := new(obj.Plist)
	var ok bool
	testOut = new(bytes.Buffer) // The assembler writes test output to this buffer.

@@ -257,11 +257,11 @@ func isHexes(s string) bool {
	return true
}

// It would be nice if the error messages began with
// It would be nice if the error messages always began with
// the standard file:line: prefix,
// but that's not where we are today.
// It might be at the beginning but it might be in the middle of the printed instruction.
var fileLineRE = regexp.MustCompile(`(?:^|\()(testdata[/\\][0-9a-z]+\.s:[0-9]+)(?:$|\))`)
var fileLineRE = regexp.MustCompile(`(?:^|\()(testdata[/\\][0-9a-z]+\.s:[0-9]+)(?:$|\)|:)`)

// Same as in test/run.go
var (

@@ -273,7 +273,7 @@ func testErrors(t *testing.T, goarch, file string) {
	input := filepath.Join("testdata", file+".s")
	architecture, ctxt := setArch(goarch)
	lexer := lex.NewLexer(input)
	parser := NewParser(ctxt, architecture, lexer)
	parser := NewParser(ctxt, architecture, lexer, false)
	pList := new(obj.Plist)
	var ok bool
	testOut = new(bytes.Buffer) // The assembler writes test output to this buffer.

@@ -281,6 +281,7 @@ func testErrors(t *testing.T, goarch, file string) {
	defer ctxt.Bso.Flush()
	failed := false
	var errBuf bytes.Buffer
	parser.errorWriter = &errBuf
	ctxt.DiagFunc = func(format string, args ...interface{}) {
		failed = true
		s := fmt.Sprintf(format, args...)

@@ -292,7 +293,7 @@ func testErrors(t *testing.T, goarch, file string) {
	pList.Firstpc, ok = parser.Parse()
	obj.Flushplist(ctxt, pList, nil, "")
	if ok && !failed {
		t.Errorf("asm: %s had no errors", goarch)
		t.Errorf("asm: %s had no errors", file)
	}

	errors := map[string]string{}

@@ -353,12 +354,7 @@ func testErrors(t *testing.T, goarch, file string) {
}

func Test386EndToEnd(t *testing.T) {
	defer func(old string) { objabi.GO386 = old }(objabi.GO386)
	for _, go386 := range []string{"387", "sse2"} {
		t.Logf("GO386=%v", go386)
		objabi.GO386 = go386
		testEndToEnd(t, "386", "386")
	}
	testEndToEnd(t, "386", "386")
}

func TestARMEndToEnd(t *testing.T) {

@@ -373,6 +369,10 @@ func TestARMEndToEnd(t *testing.T) {
	}
}

func TestGoBuildErrors(t *testing.T) {
	testErrors(t, "amd64", "buildtagerror")
}

func TestARMErrors(t *testing.T) {
	testErrors(t, "arm", "armerror")
}

@@ -442,10 +442,6 @@ func TestPPC64EndToEnd(t *testing.T) {
	testEndToEnd(t, "ppc64", "ppc64")
}

func TestPPC64Encoder(t *testing.T) {
	testEndToEnd(t, "ppc64", "ppc64enc")
}

func TestRISCVEncoder(t *testing.T) {
	testEndToEnd(t, "riscv64", "riscvenc")
}
@@ -57,7 +57,7 @@ var exprTests = []exprTest{
}

func TestExpr(t *testing.T) {
	p := NewParser(nil, nil, nil) // Expression evaluation uses none of these fields of the parser.
	p := NewParser(nil, nil, nil, false) // Expression evaluation uses none of these fields of the parser.
	for i, test := range exprTests {
		p.start(lex.Tokenize(test.input))
		result := int64(p.expr())

@@ -113,7 +113,7 @@ func TestBadExpr(t *testing.T) {
}

func runBadTest(i int, test badExprTest, t *testing.T) (err error) {
	p := NewParser(nil, nil, nil) // Expression evaluation uses none of these fields of the parser.
	p := NewParser(nil, nil, nil, false) // Expression evaluation uses none of these fields of the parser.
	p.start(lex.Tokenize(test.input))
	return tryParse(t, func() {
		p.expr()

@@ -39,7 +39,7 @@ func testBadInstParser(t *testing.T, goarch string, tests []badInstTest) {
	for i, test := range tests {
		arch, ctxt := setArch(goarch)
		tokenizer := lex.NewTokenizer("", strings.NewReader(test.input+"\n"), nil)
		parser := NewParser(ctxt, arch, tokenizer)
		parser := NewParser(ctxt, arch, tokenizer, false)

		err := tryParse(t, func() {
			parser.Parse()

@@ -28,7 +28,7 @@ func setArch(goarch string) (*arch.Arch, *obj.Link) {

func newParser(goarch string) *Parser {
	architecture, ctxt := setArch(goarch)
	return NewParser(ctxt, architecture, nil)
	return NewParser(ctxt, architecture, nil, false)
}

// tryParse executes parse func in panicOnError=true context.

@@ -75,7 +75,12 @@ func testOperandParser(t *testing.T, parser *Parser, tests []operandTest) {
	parser.start(lex.Tokenize(test.input))
	addr := obj.Addr{}
	parser.operand(&addr)
	result := obj.Dconv(&emptyProg, &addr)
	var result string
	if parser.compilingRuntime {
		result = obj.DconvWithABIDetail(&emptyProg, &addr)
	} else {
		result = obj.Dconv(&emptyProg, &addr)
	}
	if result != test.output {
		t.Errorf("fail at %s: got %s; expected %s\n", test.input, result, test.output)
	}

@@ -86,6 +91,9 @@ func TestAMD64OperandParser(t *testing.T) {
	parser := newParser("amd64")
	testOperandParser(t, parser, amd64OperandTests)
	testBadOperandParser(t, parser, amd64BadOperandTests)
	parser.compilingRuntime = true
	testOperandParser(t, parser, amd64RuntimeOperandTests)
	testBadOperandParser(t, parser, amd64BadOperandRuntimeTests)
}

func Test386OperandParser(t *testing.T) {

@@ -141,7 +149,7 @@ func TestFuncAddress(t *testing.T) {
	parser := newParser(sub.arch)
	for _, test := range sub.tests {
		parser.start(lex.Tokenize(test.input))
		name, ok := parser.funcAddress()
		name, _, ok := parser.funcAddress()

		isFuncSym := strings.HasSuffix(test.input, "(SB)") &&
			// Ignore static symbols.

@@ -298,6 +306,11 @@ var amd64OperandTests = []operandTest{
	{"[):[o-FP", ""}, // Issue 12469 - asm hung parsing the o-FP range on non ARM platforms.
}

var amd64RuntimeOperandTests = []operandTest{
	{"$bar<ABI0>(SB)", "$bar<ABI0>(SB)"},
	{"$foo<ABIInternal>(SB)", "$foo<ABIInternal>(SB)"},
}

var amd64BadOperandTests = []badOperandTest{
	{"[", "register list: expected ']', found EOF"},
	{"[4", "register list: bad low register in `[4`"},

@@ -311,6 +324,11 @@ var amd64BadOperandTests = []badOperandTest{
	{"[X0-X1-X2]", "register list: expected ']' after `[X0-X1`, found '-'"},
	{"[X0,X3]", "register list: expected '-' after `[X0`, found ','"},
	{"[X0,X1,X2,X3]", "register list: expected '-' after `[X0`, found ','"},
	{"$foo<ABI0>", "ABI selector only permitted when compiling runtime, reference was to \"foo\""},
}

var amd64BadOperandRuntimeTests = []badOperandTest{
	{"$foo<bletch>", "malformed ABI selector \"bletch\" in reference to \"foo\""},
}

var x86OperandTests = []operandTest{
@ -25,24 +25,26 @@ import (
|
|||
)
|
||||
|
||||
type Parser struct {
|
||||
lex lex.TokenReader
|
||||
lineNum int // Line number in source file.
|
||||
errorLine int // Line number of last error.
|
||||
errorCount int // Number of errors.
|
||||
pc int64 // virtual PC; count of Progs; doesn't advance for GLOBL or DATA.
|
||||
input []lex.Token
|
||||
inputPos int
|
||||
pendingLabels []string // Labels to attach to next instruction.
|
||||
labels map[string]*obj.Prog
|
||||
toPatch []Patch
|
||||
addr []obj.Addr
|
||||
arch *arch.Arch
|
||||
ctxt *obj.Link
|
||||
firstProg *obj.Prog
|
||||
lastProg *obj.Prog
|
||||
dataAddr map[string]int64 // Most recent address for DATA for this symbol.
|
||||
isJump bool // Instruction being assembled is a jump.
|
||||
errorWriter io.Writer
|
||||
lex lex.TokenReader
|
||||
lineNum int // Line number in source file.
|
||||
errorLine int // Line number of last error.
|
||||
errorCount int // Number of errors.
|
||||
sawCode bool // saw code in this file (as opposed to comments and blank lines)
|
||||
pc int64 // virtual PC; count of Progs; doesn't advance for GLOBL or DATA.
|
||||
input []lex.Token
|
||||
inputPos int
|
||||
pendingLabels []string // Labels to attach to next instruction.
|
||||
labels map[string]*obj.Prog
|
||||
toPatch []Patch
|
||||
addr []obj.Addr
|
||||
arch *arch.Arch
|
||||
ctxt *obj.Link
|
||||
firstProg *obj.Prog
|
||||
lastProg *obj.Prog
|
||||
dataAddr map[string]int64 // Most recent address for DATA for this symbol.
|
||||
isJump bool // Instruction being assembled is a jump.
|
||||
compilingRuntime bool
|
||||
errorWriter io.Writer
|
||||
}
|
||||
|
||||
type Patch struct {
|
||||
|
|
@ -50,14 +52,15 @@ type Patch struct {
|
|||
label string
|
||||
}
|
||||
|
||||
func NewParser(ctxt *obj.Link, ar *arch.Arch, lexer lex.TokenReader) *Parser {
|
||||
func NewParser(ctxt *obj.Link, ar *arch.Arch, lexer lex.TokenReader, compilingRuntime bool) *Parser {
|
||||
return &Parser{
|
||||
ctxt: ctxt,
|
||||
arch: ar,
|
||||
lex: lexer,
|
||||
labels: make(map[string]*obj.Prog),
|
||||
dataAddr: make(map[string]int64),
|
||||
errorWriter: os.Stderr,
|
||||
ctxt: ctxt,
|
||||
arch: ar,
|
||||
lex: lexer,
|
||||
labels: make(map[string]*obj.Prog),
|
||||
dataAddr: make(map[string]int64),
|
||||
errorWriter: os.Stderr,
|
||||
compilingRuntime: compilingRuntime,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -132,6 +135,30 @@ func (p *Parser) ParseSymABIs(w io.Writer) bool {
|
|||
return p.errorCount == 0
|
||||
}
|
||||
|
||||
// nextToken returns the next non-build-comment token from the lexer.
|
||||
// It reports misplaced //go:build comments but otherwise discards them.
|
||||
func (p *Parser) nextToken() lex.ScanToken {
|
||||
for {
|
||||
tok := p.lex.Next()
|
||||
if tok == lex.BuildComment {
|
||||
if p.sawCode {
|
||||
p.errorf("misplaced //go:build comment")
|
||||
}
|
||||
continue
|
||||
}
|
||||
if tok != '\n' {
|
||||
p.sawCode = true
|
||||
}
|
||||
if tok == '#' {
|
||||
// A leftover wisp of a #include/#define/etc,
|
||||
// to let us know that p.sawCode should be true now.
|
||||
// Otherwise ignored.
|
||||
continue
|
||||
}
|
||||
return tok
|
||||
}
|
||||
}
|
||||
|
||||
// line consumes a single assembly line from p.lex of the form
|
||||
//
|
||||
// {label:} WORD[.cond] [ arg {, arg} ] (';' | '\n')
|
||||
|
|
@ -146,7 +173,7 @@ next:
|
|||
// Skip newlines.
|
||||
var tok lex.ScanToken
|
||||
for {
|
||||
tok = p.lex.Next()
|
||||
tok = p.nextToken()
|
||||
// We save the line number here so error messages from this instruction
|
||||
// are labeled with this line. Otherwise we complain after we've absorbed
|
||||
// the terminating newline and the line numbers are off by one in errors.
|
||||
|
|
@ -179,11 +206,11 @@ next:
|
|||
items = make([]lex.Token, 0, 3)
|
||||
}
|
||||
for {
|
||||
tok = p.lex.Next()
|
||||
tok = p.nextToken()
|
||||
if len(operands) == 0 && len(items) == 0 {
|
||||
if p.arch.InFamily(sys.ARM, sys.ARM64, sys.AMD64, sys.I386) && tok == '.' {
|
||||
// Suffixes: ARM conditionals or x86 modifiers.
|
||||
tok = p.lex.Next()
|
||||
tok = p.nextToken()
|
||||
str := p.lex.Text()
|
||||
if tok != scanner.Ident {
|
||||
p.errorf("instruction suffix expected identifier, found %s", str)
|
||||
|
|
@ -285,8 +312,8 @@ func (p *Parser) symDefRef(w io.Writer, word string, operands [][]lex.Token) {
|
|||
// Defines text symbol in operands[0].
|
||||
if len(operands) > 0 {
|
||||
p.start(operands[0])
|
||||
if name, ok := p.funcAddress(); ok {
|
||||
fmt.Fprintf(w, "def %s ABI0\n", name)
|
||||
if name, abi, ok := p.funcAddress(); ok {
|
||||
fmt.Fprintf(w, "def %s %s\n", name, abi)
|
||||
}
|
||||
}
|
||||
return
|
||||
|
|
@ -304,8 +331,8 @@ func (p *Parser) symDefRef(w io.Writer, word string, operands [][]lex.Token) {
|
|||
// Search for symbol references.
|
||||
for _, op := range operands {
|
||||
p.start(op)
|
||||
if name, ok := p.funcAddress(); ok {
|
||||
fmt.Fprintf(w, "ref %s ABI0\n", name)
|
||||
if name, abi, ok := p.funcAddress(); ok {
|
||||
fmt.Fprintf(w, "ref %s %s\n", name, abi)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -740,20 +767,19 @@ func (p *Parser) symbolReference(a *obj.Addr, name string, prefix rune) {
|
|||
case '*':
|
||||
a.Type = obj.TYPE_INDIR
|
||||
}
|
||||
// Weirdness with statics: Might now have "<>".
|
||||
isStatic := false
|
||||
if p.peek() == '<' {
|
||||
isStatic = true
|
||||
p.next()
|
||||
p.get('>')
|
||||
}
|
||||
|
||||
// Parse optional <> (indicates a static symbol) or
|
||||
// <ABIxxx> (selecting text symbol with specific ABI).
|
||||
doIssueError := true
|
||||
isStatic, abi := p.symRefAttrs(name, doIssueError)
|
||||
|
||||
if p.peek() == '+' || p.peek() == '-' {
|
||||
a.Offset = int64(p.expr())
|
||||
}
|
||||
if isStatic {
|
||||
a.Sym = p.ctxt.LookupStatic(name)
|
||||
} else {
|
||||
a.Sym = p.ctxt.Lookup(name)
|
||||
a.Sym = p.ctxt.LookupABI(name, abi)
|
||||
}
|
||||
if p.peek() == scanner.EOF {
|
||||
if prefix == 0 && p.isJump {
|
||||
|
|
@ -798,12 +824,60 @@ func (p *Parser) setPseudoRegister(addr *obj.Addr, reg string, isStatic bool, pr
|
|||
}
|
||||
}
|
||||
|
||||
// symRefAttrs parses an optional function symbol attribute clause for
|
||||
// the function symbol 'name', logging an error for a malformed
|
||||
// attribute clause if 'issueError' is true. The return value is a
|
||||
// (boolean, ABI) pair indicating that the named symbol is either
|
||||
// static or a particular ABI specification.
|
||||
//
|
||||
// The expected form of the attribute clause is:
|
||||
//
|
||||
// empty, yielding (false, obj.ABI0)
|
||||
// "<>", yielding (true, obj.ABI0)
|
||||
// "<ABI0>" yielding (false, obj.ABI0)
|
||||
// "<ABIInternal>" yielding (false, obj.ABIInternal)
|
||||
//
|
||||
// Anything else beginning with "<" logs an error if issueError is
|
||||
// true, otherwise returns (false, obj.ABI0).
|
||||
//
|
||||
func (p *Parser) symRefAttrs(name string, issueError bool) (bool, obj.ABI) {
|
||||
abi := obj.ABI0
|
||||
isStatic := false
|
||||
if p.peek() != '<' {
|
||||
return isStatic, abi
|
||||
}
|
||||
p.next()
|
||||
tok := p.peek()
|
||||
if tok == '>' {
|
||||
isStatic = true
|
||||
} else if tok == scanner.Ident {
|
||||
abistr := p.get(scanner.Ident).String()
|
||||
if !p.compilingRuntime {
|
||||
if issueError {
|
||||
p.errorf("ABI selector only permitted when compiling runtime, reference was to %q", name)
|
||||
}
|
||||
} else {
|
||||
theabi, valid := obj.ParseABI(abistr)
|
||||
if !valid {
|
||||
if issueError {
|
||||
p.errorf("malformed ABI selector %q in reference to %q",
|
||||
abistr, name)
|
||||
}
|
||||
} else {
|
||||
abi = theabi
|
||||
}
|
||||
}
|
||||
}
|
||||
p.get('>')
|
||||
return isStatic, abi
|
||||
}
|
||||

// funcAddress parses an external function address. This is a
// constrained form of the operand syntax that's always SB-based,
// non-static, and has at most a simple integer offset:
//
// [$|*]sym[+Int](SB)
func (p *Parser) funcAddress() (string, bool) {
// [$|*]sym[<abi>][+Int](SB)
func (p *Parser) funcAddress() (string, obj.ABI, bool) {
switch p.peek() {
case '$', '*':
// Skip prefix.

@@ -813,25 +887,32 @@ func (p *Parser) funcAddress() (string, bool) {
tok := p.next()
name := tok.String()
if tok.ScanToken != scanner.Ident || p.atStartOfRegister(name) {
return "", false
return "", obj.ABI0, false
}
// Parse optional <> (indicates a static symbol) or
// <ABIxxx> (selecting text symbol with specific ABI).
noErrMsg := false
isStatic, abi := p.symRefAttrs(name, noErrMsg)
if isStatic {
return "", obj.ABI0, false // This function rejects static symbols.
}
tok = p.next()
if tok.ScanToken == '+' {
if p.next().ScanToken != scanner.Int {
return "", false
return "", obj.ABI0, false
}
tok = p.next()
}
if tok.ScanToken != '(' {
return "", false
return "", obj.ABI0, false
}
if reg := p.next(); reg.ScanToken != scanner.Ident || reg.String() != "SB" {
return "", false
return "", obj.ABI0, false
}
if p.next().ScanToken != ')' || p.peek() != scanner.EOF {
return "", false
return "", obj.ABI0, false
}
return name, true
return name, abi, true
}
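As a rough cross-check of the constrained grammar above, here is a sketch (not the real parser) that validates the [$|*]sym[<abi>][+Int](SB) form with a regular expression; the symbol-name character class is an assumption, simplified from what the assembler actually allows.

package main

import (
	"fmt"
	"regexp"
)

// funcAddr approximates funcAddress's accepted form; note that the
// static "<>" clause is deliberately not matched, mirroring the
// function's rejection of static symbols.
var funcAddr = regexp.MustCompile(`^[$*]?[A-Za-z_][A-Za-z0-9_]*(<ABI[A-Za-z0-9]*>)?(\+[0-9]+)?\(SB\)$`)

func main() {
	for _, s := range []string{"$sym(SB)", "*sym+8(SB)", "sym<ABIInternal>(SB)", "sym<>(SB)"} {
		fmt.Printf("%-24s %v\n", s, funcAddr.MatchString(s))
	}
}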

// registerIndirect parses the general form of a register indirection.

@@ -37,6 +37,7 @@ func TestErroneous(t *testing.T) {
{"TEXT", "$0É:0, 0, $1", "expected end of operand, found É"}, // Issue #12467.
{"TEXT", "$:0:(SB, 0, $1", "expected '(', found 0"}, // Issue 12468.
{"TEXT", "@B(SB),0,$0", "expected '(', found B"}, // Issue 23580.
{"TEXT", "foo<ABIInternal>(SB),0", "ABI selector only permitted when compiling runtime, reference was to \"foo\""},
{"FUNCDATA", "", "expect two operands for FUNCDATA"},
{"FUNCDATA", "(SB ", "expect two operands for FUNCDATA"},
{"DATA", "", "expect two operands for DATA"},
src/cmd/asm/internal/asm/testdata/arm64.s (vendored, 10 changes)

@@ -81,6 +81,8 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
SHA512H2 V4.D2, V3, V2 // 628464ce
SHA512SU0 V9.D2, V8.D2 // 2881c0ce
SHA512SU1 V7.D2, V6.D2, V5.D2 // c58867ce
VRAX1 V26.D2, V29.D2, V30.D2 // be8f7ace
VXAR $63, V27.D2, V21.D2, V26.D2 // bafe9bce
VADDV V0.S4, V0 // 00b8b14e
VMOVI $82, V0.B16 // 40e6024f
VUADDLV V6.B16, V6 // c638306e

@@ -139,6 +141,8 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
VTBL V14.B16, [V3.B16, V4.B16, V5.B16], V17.B16 // 71400e4e
VTBL V13.B16, [V29.B16, V30.B16, V31.B16, V0.B16], V28.B16 // bc630d4e
VTBL V3.B8, [V27.B16], V8.B8 // 6803030e
VEOR3 V2.B16, V7.B16, V12.B16, V25.B16 // 990907ce
VBCAX V1.B16, V2.B16, V26.B16, V31.B16 // 5f0722ce
VZIP1 V16.H8, V3.H8, V19.H8 // 7338504e
VZIP2 V22.D2, V25.D2, V21.D2 // 357bd64e
VZIP1 V6.D2, V9.D2, V11.D2 // 2b39c64e

@@ -218,8 +222,10 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
FMOVD $(28.0), F4 // 0490671e

// move a large constant to a Vd.
FMOVD $0x8040201008040201, V20 // FMOVD $-9205322385119247871, V20
FMOVQ $0x8040201008040202, V29 // FMOVQ $-9205322385119247870, V29
VMOVS $0x80402010, V11 // VMOVS $2151686160, V11
VMOVD $0x8040201008040201, V20 // VMOVD $-9205322385119247871, V20
VMOVQ $0x7040201008040201, $0x8040201008040201, V10 // VMOVQ $8088500183983456769, $-9205322385119247871, V10
VMOVQ $0x8040201008040202, $0x7040201008040201, V20 // VMOVQ $-9205322385119247870, $8088500183983456769, V20

FMOVS (R2)(R6), F4 // FMOVS (R2)(R6*1), F4 // 446866bc
FMOVS (R2)(R6<<2), F4 // 447866bc
src/cmd/asm/internal/asm/testdata/buildtagerror.s (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

#define X 1

//go:build x // ERROR "misplaced //go:build comment"
src/cmd/asm/internal/asm/testdata/ppc64.s (vendored, 1998 changes): diff suppressed because it is too large
src/cmd/asm/internal/asm/testdata/ppc64enc.s (vendored, 637 lines deleted)
@@ -1,637 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Initial set of opcode combinations based on
// improvements to processing of constant
// operands.

// Full set will be added at a later date.

#include "../../../../../runtime/textflag.h"

TEXT asmtest(SB),DUPOK|NOSPLIT,$0
// move constants
MOVD $1, R3 // 38600001
MOVD $-1, R4 // 3880ffff
MOVD $65535, R5 // 6005ffff
MOVD $65536, R6 // 64060001
MOVD $-32767, R5 // 38a08001
MOVD $-32768, R6 // 38c08000
MOVD $1234567, R5 // 6405001260a5d687
MOVW $1, R3 // 38600001
MOVW $-1, R4 // 3880ffff
MOVW $65535, R5 // 6005ffff
MOVW $65536, R6 // 64060001
MOVW $-32767, R5 // 38a08001
MOVW $-32768, R6 // 38c08000
MOVW $1234567, R5 // 6405001260a5d687
MOVD 8(R3), R4 // e8830008
MOVD (R3)(R4), R5 // 7ca4182a
MOVW 4(R3), R4 // e8830006
MOVW (R3)(R4), R5 // 7ca41aaa
MOVWZ 4(R3), R4 // 80830004
MOVWZ (R3)(R4), R5 // 7ca4182e
MOVH 4(R3), R4 // a8830004
MOVH (R3)(R4), R5 // 7ca41aae
MOVHZ 2(R3), R4 // a0830002
MOVHZ (R3)(R4), R5 // 7ca41a2e
MOVB 1(R3), R4 // 888300017c840774
MOVB (R3)(R4), R5 // 7ca418ae7ca50774
MOVBZ 1(R3), R4 // 88830001
MOVBZ (R3)(R4), R5 // 7ca418ae
MOVDBR (R3)(R4), R5 // 7ca41c28
MOVWBR (R3)(R4), R5 // 7ca41c2c
MOVHBR (R3)(R4), R5 // 7ca41e2c

MOVDU 8(R3), R4 // e8830009
MOVDU (R3)(R4), R5 // 7ca4186a
MOVWU (R3)(R4), R5 // 7ca41aea
MOVWZU 4(R3), R4 // 84830004
MOVWZU (R3)(R4), R5 // 7ca4186e
MOVHU 2(R3), R4 // ac830002
MOVHU (R3)(R4), R5 // 7ca41aee
MOVHZU 2(R3), R4 // a4830002
MOVHZU (R3)(R4), R5 // 7ca41a6e
MOVBU 1(R3), R4 // 8c8300017c840774
MOVBU (R3)(R4), R5 // 7ca418ee7ca50774
MOVBZU 1(R3), R4 // 8c830001
MOVBZU (R3)(R4), R5 // 7ca418ee

MOVD R4, 8(R3) // f8830008
MOVD R5, (R3)(R4) // 7ca4192a
MOVW R4, 4(R3) // 90830004
MOVW R5, (R3)(R4) // 7ca4192e
MOVH R4, 2(R3) // b0830002
MOVH R5, (R3)(R4) // 7ca41b2e
MOVB R4, 1(R3) // 98830001
MOVB R5, (R3)(R4) // 7ca419ae
MOVDBR R5, (R3)(R4) // 7ca41d28
MOVWBR R5, (R3)(R4) // 7ca41d2c
MOVHBR R5, (R3)(R4) // 7ca41f2c

MOVDU R4, 8(R3) // f8830009
MOVDU R5, (R3)(R4) // 7ca4196a
MOVWU R4, 4(R3) // 94830004
MOVWU R5, (R3)(R4) // 7ca4196e
MOVHU R4, 2(R3) // b4830002
MOVHU R5, (R3)(R4) // 7ca41b6e
MOVBU R4, 1(R3) // 9c830001
MOVBU R5, (R3)(R4) // 7ca419ee

ADD $1, R3 // 38630001
ADD $1, R3, R4 // 38830001
ADD $-1, R4 // 3884ffff
ADD $-1, R4, R5 // 38a4ffff
ADD $65535, R5 // 601fffff7cbf2a14
ADD $65535, R5, R6 // 601fffff7cdf2a14
ADD $65536, R6 // 3cc60001
ADD $65536, R6, R7 // 3ce60001
ADD $-32767, R5 // 38a58001
ADD $-32767, R5, R4 // 38858001
ADD $-32768, R6 // 38c68000
ADD $-32768, R6, R5 // 38a68000
ADD $1234567, R5 // 641f001263ffd6877cbf2a14
ADD $1234567, R5, R6 // 641f001263ffd6877cdf2a14
ADDIS $8, R3 // 3c630008
ADDIS $1000, R3, R4 // 3c8303e8

ANDCC $1, R3 // 70630001
ANDCC $1, R3, R4 // 70640001
ANDCC $-1, R4 // 3be0ffff7fe42039
ANDCC $-1, R4, R5 // 3be0ffff7fe52039
ANDCC $65535, R5 // 70a5ffff
ANDCC $65535, R5, R6 // 70a6ffff
ANDCC $65536, R6 // 74c60001
ANDCC $65536, R6, R7 // 74c70001
ANDCC $-32767, R5 // 3be080017fe52839
ANDCC $-32767, R5, R4 // 3be080017fe42839
ANDCC $-32768, R6 // 3be080007fe63039
ANDCC $-32768, R5, R6 // 3be080007fe62839
ANDCC $1234567, R5 // 641f001263ffd6877fe52839
ANDCC $1234567, R5, R6 // 641f001263ffd6877fe62839
ANDISCC $1, R3 // 74630001
ANDISCC $1000, R3, R4 // 746403e8

OR $1, R3 // 60630001
OR $1, R3, R4 // 60640001
OR $-1, R4 // 3be0ffff7fe42378
OR $-1, R4, R5 // 3be0ffff7fe52378
OR $65535, R5 // 60a5ffff
OR $65535, R5, R6 // 60a6ffff
OR $65536, R6 // 64c60001
OR $65536, R6, R7 // 64c70001
OR $-32767, R5 // 3be080017fe52b78
OR $-32767, R5, R6 // 3be080017fe62b78
OR $-32768, R6 // 3be080007fe63378
OR $-32768, R6, R7 // 3be080007fe73378
OR $1234567, R5 // 641f001263ffd6877fe52b78
OR $1234567, R5, R3 // 641f001263ffd6877fe32b78

XOR $1, R3 // 68630001
XOR $1, R3, R4 // 68640001
XOR $-1, R4 // 3be0ffff7fe42278
XOR $-1, R4, R5 // 3be0ffff7fe52278
XOR $65535, R5 // 68a5ffff
XOR $65535, R5, R6 // 68a6ffff
XOR $65536, R6 // 6cc60001
XOR $65536, R6, R7 // 6cc70001
XOR $-32767, R5 // 3be080017fe52a78
XOR $-32767, R5, R6 // 3be080017fe62a78
XOR $-32768, R6 // 3be080007fe63278
XOR $-32768, R6, R7 // 3be080007fe73278
XOR $1234567, R5 // 641f001263ffd6877fe52a78
XOR $1234567, R5, R3 // 641f001263ffd6877fe32a78

// TODO: the order of CR operands don't match
CMP R3, R4 // 7c232000
CMPU R3, R4 // 7c232040
CMPW R3, R4 // 7c032000
CMPWU R3, R4 // 7c032040

// TODO: constants for ADDC?
ADD R3, R4 // 7c841a14
ADD R3, R4, R5 // 7ca41a14
ADDC R3, R4 // 7c841814
ADDC R3, R4, R5 // 7ca41814
ADDE R3, R4 // 7c841914
ADDECC R3, R4 // 7c841915
ADDEV R3, R4 // 7c841d14
ADDEVCC R3, R4 // 7c841d15
ADDV R3, R4 // 7c841e14
ADDVCC R3, R4 // 7c841e15
ADDCCC R3, R4, R5 // 7ca41815
ADDME R3, R4 // 7c8301d4
ADDMECC R3, R4 // 7c8301d5
ADDMEV R3, R4 // 7c8305d4
ADDMEVCC R3, R4 // 7c8305d5
ADDCV R3, R4 // 7c841c14
ADDCVCC R3, R4 // 7c841c15
ADDZE R3, R4 // 7c830194
ADDZECC R3, R4 // 7c830195
ADDZEV R3, R4 // 7c830594
ADDZEVCC R3, R4 // 7c830595
SUBME R3, R4 // 7c8301d0
SUBMECC R3, R4 // 7c8301d1
SUBMEV R3, R4 // 7c8305d0
SUBZE R3, R4 // 7c830190
SUBZECC R3, R4 // 7c830191
SUBZEV R3, R4 // 7c830590
SUBZEVCC R3, R4 // 7c830591

AND R3, R4 // 7c841838
AND R3, R4, R5 // 7c851838
ANDN R3, R4, R5 // 7c851878
ANDCC R3, R4, R5 // 7c851839
OR R3, R4 // 7c841b78
OR R3, R4, R5 // 7c851b78
ORN R3, R4, R5 // 7c851b38
ORCC R3, R4, R5 // 7c851b79
XOR R3, R4 // 7c841a78
XOR R3, R4, R5 // 7c851a78
XORCC R3, R4, R5 // 7c851a79
NAND R3, R4, R5 // 7c851bb8
NANDCC R3, R4, R5 // 7c851bb9
EQV R3, R4, R5 // 7c851a38
EQVCC R3, R4, R5 // 7c851a39
NOR R3, R4, R5 // 7c8518f8
NORCC R3, R4, R5 // 7c8518f9

SUB R3, R4 // 7c832050
SUB R3, R4, R5 // 7ca32050
SUBC R3, R4 // 7c832010
SUBC R3, R4, R5 // 7ca32010

MULLW R3, R4 // 7c8419d6
MULLW R3, R4, R5 // 7ca419d6
MULLWCC R3, R4, R5 // 7ca419d7
MULHW R3, R4, R5 // 7ca41896

MULHWU R3, R4, R5 // 7ca41816
MULLD R3, R4 // 7c8419d2
MULLD R4, R4, R5 // 7ca421d2
MULLDCC R3, R4, R5 // 7ca419d3
MULHD R3, R4, R5 // 7ca41892
MULHDCC R3, R4, R5 // 7ca41893

MULLWV R3, R4 // 7c841dd6
MULLWV R3, R4, R5 // 7ca41dd6
MULLWVCC R3, R4, R5 // 7ca41dd7
MULHWUCC R3, R4, R5 // 7ca41817
MULLDV R3, R4, R5 // 7ca41dd2
MULLDVCC R3, R4, R5 // 7ca41dd3

DIVD R3,R4 // 7c841bd2
DIVD R3, R4, R5 // 7ca41bd2
DIVDCC R3,R4, R5 // 7ca41bd3
DIVDU R3, R4, R5 // 7ca41b92
DIVDV R3, R4, R5 // 7ca41fd2
DIVDUCC R3, R4, R5 // 7ca41b93
DIVDVCC R3, R4, R5 // 7ca41fd3
DIVDUV R3, R4, R5 // 7ca41f92
DIVDUVCC R3, R4, R5 // 7ca41f93
DIVDE R3, R4, R5 // 7ca41b52
DIVDECC R3, R4, R5 // 7ca41b53
DIVDEU R3, R4, R5 // 7ca41b12
DIVDEUCC R3, R4, R5 // 7ca41b13

REM R3, R4, R5 // 7fe41bd67fff19d67cbf2050
REMU R3, R4, R5 // 7fe41b967fff19d67bff00287cbf2050
REMD R3, R4, R5 // 7fe41bd27fff19d27cbf2050
REMDU R3, R4, R5 // 7fe41b927fff19d27cbf2050

MODUD R3, R4, R5 // 7ca41a12
MODUW R3, R4, R5 // 7ca41a16
MODSD R3, R4, R5 // 7ca41e12
MODSW R3, R4, R5 // 7ca41e16

SLW $8, R3, R4 // 5464402e
SLW R3, R4, R5 // 7c851830
SLWCC R3, R4 // 7c841831
SLD $16, R3, R4 // 786483e4
SLD R3, R4, R5 // 7c851836
SLDCC R3, R4 // 7c841837

SRW $8, R3, R4 // 5464c23e
SRW R3, R4, R5 // 7c851c30
SRWCC R3, R4 // 7c841c31
SRAW $8, R3, R4 // 7c644670
SRAW R3, R4, R5 // 7c851e30
SRAWCC R3, R4 // 7c841e31
SRD $16, R3, R4 // 78648402
SRD R3, R4, R5 // 7c851c36
SRDCC R3, R4 // 7c841c37
SRAD $16, R3, R4 // 7c648674
SRAD R3, R4, R5 // 7c851e34
SRDCC R3, R4 // 7c841c37
ROTLW $16, R3, R4 // 5464803e
ROTLW R3, R4, R5 // 5c85183e
RLWMI $7, R3, $65535, R6 // 50663c3e
RLWMICC $7, R3, $65535, R6 // 50663c3f
RLWNM $3, R4, $7, R6 // 54861f7e
RLWNMCC $3, R4, $7, R6 // 54861f7f
RLDMI $0, R4, $7, R6 // 7886076c
RLDMICC $0, R4, $7, R6 // 7886076d
RLDIMI $0, R4, $7, R6 // 788601cc
RLDIMICC $0, R4, $7, R6 // 788601cd
RLDC $0, R4, $15, R6 // 78860728
RLDCCC $0, R4, $15, R6 // 78860729
RLDCL $0, R4, $7, R6 // 78860770
RLDCLCC $0, R4, $15, R6 // 78860721
RLDCR $0, R4, $-16, R6 // 788606f2
RLDCRCC $0, R4, $-16, R6 // 788606f3
RLDICL $0, R4, $15, R6 // 788603c0
RLDICLCC $0, R4, $15, R6 // 788603c1
RLDICR $0, R4, $15, R6 // 788603c4
RLDICRCC $0, R4, $15, R6 // 788603c5
RLDIC $0, R4, $15, R6 // 788603c8
RLDICCC $0, R4, $15, R6 // 788603c9
CLRLSLWI $16, R5, $8, R4 // 54a4861e
CLRLSLDI $2, R4, $24, R3 // 78831588

BEQ 0(PC) // 41820000
BGE 0(PC) // 40800000
BGT 4(PC) // 41810030
BLE 0(PC) // 40810000
BLT 0(PC) // 41800000
BNE 0(PC) // 40820000
JMP 8(PC) // 48000020

CRAND CR1, CR2, CR3 // 4c620a02
CRANDN CR1, CR2, CR3 // 4c620902
CREQV CR1, CR2, CR3 // 4c620a42
CRNAND CR1, CR2, CR3 // 4c6209c2
CRNOR CR1, CR2, CR3 // 4c620842
CROR CR1, CR2, CR3 // 4c620b82
CRORN CR1, CR2, CR3 // 4c620b42
CRXOR CR1, CR2, CR3 // 4c620982

ISEL $1, R3, R4, R5 // 7ca3205e
ISEL $0, R3, R4, R5 // 7ca3201e
ISEL $2, R3, R4, R5 // 7ca3209e
ISEL $3, R3, R4, R5 // 7ca320de
ISEL $4, R3, R4, R5 // 7ca3211e
POPCNTB R3, R4 // 7c6400f4
POPCNTW R3, R4 // 7c6402f4
POPCNTD R3, R4 // 7c6403f4

PASTECC R3, R4 // 7c23270d
COPY R3, R4 // 7c23260c

// load-and-reserve
LBAR (R4)(R3*1),$1,R5 // 7ca32069
LBAR (R4),$0,R5 // 7ca02068
LBAR (R3),R5 // 7ca01868
LHAR (R4)(R3*1),$1,R5 // 7ca320e9
LHAR (R4),$0,R5 // 7ca020e8
LHAR (R3),R5 // 7ca018e8
LWAR (R4)(R3*1),$1,R5 // 7ca32029
LWAR (R4),$0,R5 // 7ca02028
LWAR (R3),R5 // 7ca01828
LDAR (R4)(R3*1),$1,R5 // 7ca320a9
LDAR (R4),$0,R5 // 7ca020a8
LDAR (R3),R5 // 7ca018a8

STBCCC R3, (R4)(R5) // 7c65256d
STWCCC R3, (R4)(R5) // 7c65212d
STDCCC R3, (R4)(R5) // 7c6521ad
STHCCC R3, (R4)(R5)

SYNC // 7c0004ac
ISYNC // 4c00012c
LWSYNC // 7c2004ac

DCBF (R3)(R4) // 7c0418ac
DCBI (R3)(R4) // 7c041bac
DCBST (R3)(R4) // 7c04186c
DCBZ (R3)(R4) // 7c041fec
DCBT (R3)(R4) // 7c041a2c
ICBI (R3)(R4) // 7c041fac

// float constants
FMOVD $(0.0), F1 // f0210cd0
FMOVD $(-0.0), F1 // f0210cd0fc200850

FMOVD 8(R3), F1 // c8230008
FMOVD (R3)(R4), F1 // 7c241cae
FMOVDU 8(R3), F1 // cc230008
FMOVDU (R3)(R4), F1 // 7c241cee
FMOVS 4(R3), F1 // c0230004
FMOVS (R3)(R4), F1 // 7c241c2e
FMOVSU 4(R3), F1 // c4230004
FMOVSU (R3)(R4), F1 // 7c241c6e

FMOVD F1, 8(R3) // d8230008
FMOVD F1, (R3)(R4) // 7c241dae
FMOVDU F1, 8(R3) // dc230008
FMOVDU F1, (R3)(R4) // 7c241dee
FMOVS F1, 4(R3) // d0230004
FMOVS F1, (R3)(R4) // 7c241d2e
FMOVSU F1, 4(R3) // d4230004
FMOVSU F1, (R3)(R4) // 7c241d6e
FADD F1, F2 // fc42082a
FADD F1, F2, F3 // fc62082a
FADDCC F1, F2, F3 // fc62082b
FADDS F1, F2 // ec42082a
FADDS F1, F2, F3 // ec62082a
FADDSCC F1, F2, F3 // ec62082b
FSUB F1, F2 // fc420828
FSUB F1, F2, F3 // fc620828
FSUBCC F1, F2, F3 // fc620829
FSUBS F1, F2 // ec420828
FSUBS F1, F2, F3 // ec620828
FSUBCC F1, F2, F3 // fc620829
FMUL F1, F2 // fc420072
FMUL F1, F2, F3 // fc620072
FMULCC F1, F2, F3 // fc620073
FMULS F1, F2 // ec420072
FMULS F1, F2, F3 // ec620072
FMULSCC F1, F2, F3 // ec620073
FDIV F1, F2 // fc420824
FDIV F1, F2, F3 // fc620824
FDIVCC F1, F2, F3 // fc620825
FDIVS F1, F2 // ec420824
FDIVS F1, F2, F3 // ec620824
FDIVSCC F1, F2, F3 // ec620825
FMADD F1, F2, F3, F4 // fc8110fa
FMADDCC F1, F2, F3, F4 // fc8110fb
FMADDS F1, F2, F3, F4 // ec8110fa
FMADDSCC F1, F2, F3, F4 // ec8110fb
FMSUB F1, F2, F3, F4 // fc8110f8
FMSUBCC F1, F2, F3, F4 // fc8110f9
FMSUBS F1, F2, F3, F4 // ec8110f8
FMSUBSCC F1, F2, F3, F4 // ec8110f9
FNMADD F1, F2, F3, F4 // fc8110fe
FNMADDCC F1, F2, F3, F4 // fc8110ff
FNMADDS F1, F2, F3, F4 // ec8110fe
FNMADDSCC F1, F2, F3, F4 // ec8110ff
FNMSUB F1, F2, F3, F4 // fc8110fc
FNMSUBCC F1, F2, F3, F4 // fc8110fd
FNMSUBS F1, F2, F3, F4 // ec8110fc
FNMSUBSCC F1, F2, F3, F4 // ec8110fd
FSEL F1, F2, F3, F4 // fc8110ee
FSELCC F1, F2, F3, F4 // fc8110ef
FABS F1, F2 // fc400a10
FABSCC F1, F2 // fc400a11
FNEG F1, F2 // fc400850
FABSCC F1, F2 // fc400a11
FRSP F1, F2 // fc400818
FRSPCC F1, F2 // fc400819
FCTIW F1, F2 // fc40081c
FCTIWCC F1, F2 // fc40081d
FCTIWZ F1, F2 // fc40081e
FCTIWZCC F1, F2 // fc40081f
FCTID F1, F2 // fc400e5c
FCTIDCC F1, F2 // fc400e5d
FCTIDZ F1, F2 // fc400e5e
FCTIDZCC F1, F2 // fc400e5f
FCFID F1, F2 // fc400e9c
FCFIDCC F1, F2 // fc400e9d
FCFIDU F1, F2 // fc400f9c
FCFIDUCC F1, F2 // fc400f9d
FCFIDS F1, F2 // ec400e9c
FCFIDSCC F1, F2 // ec400e9d
FRES F1, F2 // ec400830
FRESCC F1, F2 // ec400831
FRIM F1, F2 // fc400bd0
FRIMCC F1, F2 // fc400bd1
FRIP F1, F2 // fc400b90
FRIPCC F1, F2 // fc400b91
FRIZ F1, F2 // fc400b50
FRIZCC F1, F2 // fc400b51
FRIN F1, F2 // fc400b10
FRINCC F1, F2 // fc400b11
FRSQRTE F1, F2 // fc400834
FRSQRTECC F1, F2 // fc400835
FSQRT F1, F2 // fc40082c
FSQRTCC F1, F2 // fc40082d
FSQRTS F1, F2 // ec40082c
FSQRTSCC F1, F2 // ec40082d
FCPSGN F1, F2 // fc420810
FCPSGNCC F1, F2 // fc420811
FCMPO F1, F2 // fc011040
FCMPU F1, F2 // fc011000
LVX (R3)(R4), V1 // 7c2418ce
LVXL (R3)(R4), V1 // 7c241ace
LVSL (R3)(R4), V1 // 7c24180c
LVSR (R3)(R4), V1 // 7c24184c
LVEBX (R3)(R4), V1 // 7c24180e
LVEHX (R3)(R4), V1 // 7c24184e
LVEWX (R3)(R4), V1 // 7c24188e
STVX V1, (R3)(R4) // 7c2419ce
STVXL V1, (R3)(R4) // 7c241bce
STVEBX V1, (R3)(R4) // 7c24190e
STVEHX V1, (R3)(R4) // 7c24194e
STVEWX V1, (R3)(R4) // 7c24198e

VAND V1, V2, V3 // 10611404
VANDC V1, V2, V3 // 10611444
VNAND V1, V2, V3 // 10611584
VOR V1, V2, V3 // 10611484
VORC V1, V2, V3 // 10611544
VXOR V1, V2, V3 // 106114c4
VNOR V1, V2, V3 // 10611504
VEQV V1, V2, V3 // 10611684
VADDUBM V1, V2, V3 // 10611000
VADDUHM V1, V2, V3 // 10611040
VADDUWM V1, V2, V3 // 10611080
VADDUDM V1, V2, V3 // 106110c0
VADDUQM V1, V2, V3 // 10611100
VADDCUQ V1, V2, V3 // 10611140
VADDCUW V1, V2, V3 // 10611180
VADDUBS V1, V2, V3 // 10611200
VADDUHS V1, V2, V3 // 10611240
VADDUWS V1, V2, V3 // 10611280
VSUBUBM V1, V2, V3 // 10611400
VSUBUHM V1, V2, V3 // 10611440
VSUBUWM V1, V2, V3 // 10611480
VSUBUDM V1, V2, V3 // 106114c0
VSUBUQM V1, V2, V3 // 10611500
VSUBCUQ V1, V2, V3 // 10611540
VSUBCUW V1, V2, V3 // 10611580
VSUBUBS V1, V2, V3 // 10611600
VSUBUHS V1, V2, V3 // 10611640
VSUBUWS V1, V2, V3 // 10611680
VSUBSBS V1, V2, V3 // 10611700
VSUBSHS V1, V2, V3 // 10611740
VSUBSWS V1, V2, V3 // 10611780
VSUBEUQM V1, V2, V3, V4 // 108110fe
VSUBECUQ V1, V2, V3, V4 // 108110ff
VMULESB V1, V2, V3 // 10611308
VMULOSB V1, V2, V3 // 10611108
VMULEUB V1, V2, V3 // 10611208
VMULOUB V1, V2, V3 // 10611008
VMULESH V1, V2, V3 // 10611348
VMULOSH V1, V2, V3 // 10611148
VMULEUH V1, V2, V3 // 10611248
VMULOUH V1, V2, V3 // 10611048
VMULESH V1, V2, V3 // 10611348
VMULOSW V1, V2, V3 // 10611188
VMULEUW V1, V2, V3 // 10611288
VMULOUW V1, V2, V3 // 10611088
VMULUWM V1, V2, V3 // 10611089
VPMSUMB V1, V2, V3 // 10611408
VPMSUMH V1, V2, V3 // 10611448
VPMSUMW V1, V2, V3 // 10611488
VPMSUMD V1, V2, V3 // 106114c8
VMSUMUDM V1, V2, V3, V4 // 108110e3
VRLB V1, V2, V3 // 10611004
VRLH V1, V2, V3 // 10611044
VRLW V1, V2, V3 // 10611084
VRLD V1, V2, V3 // 106110c4
VSLB V1, V2, V3 // 10611104
VSLH V1, V2, V3 // 10611144
VSLW V1, V2, V3 // 10611184
VSL V1, V2, V3 // 106111c4
VSLO V1, V2, V3 // 1061140c
VSRB V1, V2, V3 // 10611204
VSRH V1, V2, V3 // 10611244
VSRW V1, V2, V3 // 10611284
VSR V1, V2, V3 // 106112c4
VSRO V1, V2, V3 // 1061144c
VSLD V1, V2, V3 // 106115c4
VSRAB V1, V2, V3 // 10611304
VSRAH V1, V2, V3 // 10611344
VSRAW V1, V2, V3 // 10611384
VSRAD V1, V2, V3 // 106113c4
VSLDOI $3, V1, V2, V3 // 106110ec
VCLZB V1, V2 // 10400f02
VCLZH V1, V2 // 10400f42
VCLZW V1, V2 // 10400f82
VCLZD V1, V2 // 10400fc2
VPOPCNTB V1, V2 // 10400f03
VPOPCNTH V1, V2 // 10400f43
VPOPCNTW V1, V2 // 10400f83
VPOPCNTD V1, V2 // 10400fc3
VCMPEQUB V1, V2, V3 // 10611006
VCMPEQUBCC V1, V2, V3 // 10611406
VCMPEQUH V1, V2, V3 // 10611046
VCMPEQUHCC V1, V2, V3 // 10611446
VCMPEQUW V1, V2, V3 // 10611086
VCMPEQUWCC V1, V2, V3 // 10611486
VCMPEQUD V1, V2, V3 // 106110c7
VCMPEQUDCC V1, V2, V3 // 106114c7
VCMPGTUB V1, V2, V3 // 10611206
VCMPGTUBCC V1, V2, V3 // 10611606
VCMPGTUH V1, V2, V3 // 10611246
VCMPGTUHCC V1, V2, V3 // 10611646
VCMPGTUW V1, V2, V3 // 10611286
VCMPGTUWCC V1, V2, V3 // 10611686
VCMPGTUD V1, V2, V3 // 106112c7
VCMPGTUDCC V1, V2, V3 // 106116c7
VCMPGTSB V1, V2, V3 // 10611306
VCMPGTSBCC V1, V2, V3 // 10611706
VCMPGTSH V1, V2, V3 // 10611346
VCMPGTSHCC V1, V2, V3 // 10611746
VCMPGTSW V1, V2, V3 // 10611386
VCMPGTSWCC V1, V2, V3 // 10611786
VCMPGTSD V1, V2, V3 // 106113c7
VCMPGTSDCC V1, V2, V3 // 106117c7
VCMPNEZB V1, V2, V3 // 10611107
VCMPNEZBCC V1, V2, V3 // 10611507
VCMPNEB V1, V2, V3 // 10611007
VCMPNEBCC V1, V2, V3 // 10611407
VCMPNEH V1, V2, V3 // 10611047
VCMPNEHCC V1, V2, V3 // 10611447
VCMPNEW V1, V2, V3 // 10611087
VCMPNEWCC V1, V2, V3 // 10611487
VPERM V1, V2, V3, V4 // 108110eb
VPERMR V1, V2, V3, V4 // 108110fb
VPERMXOR V1, V2, V3, V4 // 108110ed
VBPERMQ V1, V2, V3 // 1061154c
VBPERMD V1, V2, V3 // 106115cc
VSEL V1, V2, V3, V4 // 108110ea
VSPLTB $1, V1, V2 // 10410a0c
VSPLTH $1, V1, V2 // 10410a4c
VSPLTW $1, V1, V2 // 10410a8c
VSPLTISB $1, V1 // 1021030c
VSPLTISW $1, V1 // 1021038c
VSPLTISH $1, V1 // 1021034c
VCIPHER V1, V2, V3 // 10611508
VCIPHERLAST V1, V2, V3 // 10611509
VNCIPHER V1, V2, V3 // 10611548
VNCIPHERLAST V1, V2, V3 // 10611549
VSBOX V1, V2 // 104105c8
VSHASIGMAW $1, V1, $15, V2 // 10418e82
VSHASIGMAD $2, V1, $15, V2 // 104196c2

LXVD2X (R3)(R4), VS1 // 7c241e98
LXV 16(R3), VS1 // f4230011
LXVL R3, R4, VS1 // 7c23221a
LXVLL R3, R4, VS1 // 7c23225a
LXVX R3, R4, VS1 // 7c232218
LXSDX (R3)(R4), VS1 // 7c241c98
STXVD2X VS1, (R3)(R4) // 7c241f98
STXV VS1,16(R3) // f4230015
STXVL VS1, R3, R4 // 7c23231a
STXVLL VS1, R3, R4 // 7c23235a
STXVX VS1, R3, R4 // 7c232318
STXSDX VS1, (R3)(R4) // 7c241d98
LXSIWAX (R3)(R4), VS1 // 7c241898
STXSIWX VS1, (R3)(R4) // 7c241918
MFVSRD VS1, R3 // 7c230066
MTVSRD R3, VS1 // 7c230166
XXLAND VS1, VS2, VS3 // f0611410
XXLOR VS1, VS2, VS3 // f0611490
XXLORC VS1, VS2, VS3 // f0611550
XXLXOR VS1, VS2, VS3 // f06114d0
XXSEL VS1, VS2, VS3, VS4 // f08110f0
XXMRGHW VS1, VS2, VS3 // f0611090
XXSPLTW VS1, $1, VS2 // f0410a90
XXPERM VS1, VS2, VS3 // f06110d0
XXSLDWI VS1, VS2, $1, VS3 // f0611110
XSCVDPSP VS1, VS2 // f0400c24
XVCVDPSP VS1, VS2 // f0400e24
XSCVSXDDP VS1, VS2 // f0400de0
XVCVDPSXDS VS1, VS2 // f0400f60
XVCVSXDDP VS1, VS2 // f0400fe0

MOVD R3, LR // 7c6803a6
MOVD R3, CTR // 7c6903a6
MOVD R3, XER // 7c6103a6
MOVD LR, R3 // 7c6802a6
MOVD CTR, R3 // 7c6902a6
MOVD XER, R3 // 7c6102a6
MOVFL CR3, CR1 // 4c8c0000

RET

@@ -109,6 +109,9 @@ func (in *Input) Next() ScanToken {
in.Error("'#' must be first item on line")
}
in.beginningOfLine = in.hash()
in.text = "#"
return '#'

case scanner.Ident:
// Is it a macro name?
name := in.Stack.Text()
@@ -22,11 +22,13 @@ type ScanToken rune
const (
// Asm defines some two-character lexemes. We make up
// a rune/ScanToken value for them - ugly but simple.
LSH ScanToken = -1000 - iota // << Left shift.
RSH // >> Logical right shift.
ARR // -> Used on ARM for shift type 3, arithmetic right shift.
ROT // @> Used on ARM for shift type 4, rotate right.
macroName // name of macro that should not be expanded
LSH ScanToken = -1000 - iota // << Left shift.
RSH // >> Logical right shift.
ARR // -> Used on ARM for shift type 3, arithmetic right shift.
ROT // @> Used on ARM for shift type 4, rotate right.
Include // included file started here
BuildComment // //go:build or +build comment
macroName // name of macro that should not be expanded
)

// IsRegisterShift reports whether the token is one of the ARM register shift operators.
@@ -281,6 +281,9 @@ func drain(input *Input) string {
if tok == scanner.EOF {
return buf.String()
}
if tok == '#' {
continue
}
if buf.Len() > 0 {
buf.WriteByte('.')
}
@@ -107,10 +107,13 @@ func (t *Tokenizer) Next() ScanToken {
if t.tok != scanner.Comment {
break
}
length := strings.Count(s.TokenText(), "\n")
t.line += length
// TODO: If we ever have //go: comments in assembly, will need to keep them here.
// For now, just discard all comments.
text := s.TokenText()
t.line += strings.Count(text, "\n")
// TODO: Use constraint.IsGoBuild once it exists.
if strings.HasPrefix(text, "//go:build") {
t.tok = BuildComment
break
}
}
switch t.tok {
case '\n':
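The TODO above was later satisfiable: the go/build/constraint package (added in Go 1.16) exposes IsGoBuild. A small sketch of what the prefix check could look like with it:

package main

import (
	"fmt"
	"go/build/constraint"
)

func main() {
	// IsGoBuild reports whether a comment line is a //go:build constraint.
	for _, line := range []string{"//go:build linux && amd64", "// just a comment"} {
		fmt.Printf("%-30q IsGoBuild=%v\n", line, constraint.IsGoBuild(line))
	}
}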

@@ -52,6 +52,7 @@ func main() {
case "all", "ret":
ctxt.Retpoline = true
}
compilingRuntime := objabi.IsRuntimePackagePath(*flags.Importpath)

ctxt.Bso = bufio.NewWriter(os.Stdout)
defer ctxt.Bso.Flush()

@@ -74,7 +75,7 @@ func main() {
var failedFile string
for _, f := range flag.Args() {
lexer := lex.NewLexer(f)
parser := asm.NewParser(ctxt, architecture, lexer)
parser := asm.NewParser(ctxt, architecture, lexer, compilingRuntime)
ctxt.DiagFunc = func(format string, args ...interface{}) {
diag = true
log.Printf(format, args...)

@@ -170,35 +170,51 @@ func usage() {

var ptrSizeMap = map[string]int64{
"386": 4,
"alpha": 8,
"amd64": 8,
"arm": 4,
"arm64": 8,
"m68k": 4,
"mips": 4,
"mipsle": 4,
"mips64": 8,
"mips64le": 8,
"nios2": 4,
"ppc": 4,
"ppc64": 8,
"ppc64le": 8,
"riscv": 4,
"riscv64": 8,
"s390": 4,
"s390x": 8,
"sh": 4,
"shbe": 4,
"sparc": 4,
"sparc64": 8,
}

var intSizeMap = map[string]int64{
"386": 4,
"alpha": 8,
"amd64": 8,
"arm": 4,
"arm64": 8,
"m68k": 4,
"mips": 4,
"mipsle": 4,
"mips64": 8,
"mips64le": 8,
"nios2": 4,
"ppc": 4,
"ppc64": 8,
"ppc64le": 8,
"riscv": 4,
"riscv64": 8,
"s390": 4,
"s390x": 8,
"sh": 4,
"shbe": 4,
"sparc": 4,
"sparc64": 8,
}
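A quick cross-check of the tables for whichever platform this runs on: each GOARCH entry corresponds to the sizes of uintptr and int there.

package main

import (
	"fmt"
	"runtime"
	"unsafe"
)

func main() {
	// On amd64 this prints "amd64 8 8"; on 386 or arm it prints 4 4.
	fmt.Println(runtime.GOARCH, unsafe.Sizeof(uintptr(0)), unsafe.Sizeof(int(0)))
}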

@@ -224,8 +240,7 @@ var exportHeader = flag.String("exportheader", "", "where to write export header
var gccgo = flag.Bool("gccgo", false, "generate files for use with gccgo")
var gccgoprefix = flag.String("gccgoprefix", "", "-fgo-prefix option used with gccgo")
var gccgopkgpath = flag.String("gccgopkgpath", "", "-fgo-pkgpath option used with gccgo")
var gccgoMangleCheckDone bool
var gccgoNewmanglingInEffect bool
var gccgoMangler func(string) string
var importRuntimeCgo = flag.Bool("import_runtime_cgo", true, "import runtime/cgo in generated code")
var importSyscall = flag.Bool("import_syscall", true, "import syscall in generated code")
var goarch, goos string
@@ -6,6 +6,7 @@ package main

import (
"bytes"
"cmd/internal/pkgpath"
"debug/elf"
"debug/macho"
"debug/pe"

@@ -15,7 +16,6 @@ import (
"go/token"
"internal/xcoff"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
@@ -1282,112 +1282,24 @@ func (p *Package) writeExportHeader(fgcch io.Writer) {
fmt.Fprintf(fgcch, "%s\n", p.gccExportHeaderProlog())
}

// gccgoUsesNewMangling reports whether gccgo uses the new collision-free
// packagepath mangling scheme (see determineGccgoManglingScheme for more
// info).
func gccgoUsesNewMangling() bool {
if !gccgoMangleCheckDone {
gccgoNewmanglingInEffect = determineGccgoManglingScheme()
gccgoMangleCheckDone = true
}
return gccgoNewmanglingInEffect
}

const mangleCheckCode = `
package läufer
func Run(x int) int {
return 1
}
`

// determineGccgoManglingScheme performs a runtime test to see which
// flavor of packagepath mangling gccgo is using. Older versions of
// gccgo use a simple mangling scheme where there can be collisions
// between packages whose paths are different but mangle to the same
// string. More recent versions of gccgo use a new mangler that avoids
// these collisions. Return value is whether gccgo uses the new mangling.
func determineGccgoManglingScheme() bool {

// Emit a small Go file for gccgo to compile.
filepat := "*_gccgo_manglecheck.go"
var f *os.File
var err error
if f, err = ioutil.TempFile(*objDir, filepat); err != nil {
fatalf("%v", err)
}
gofilename := f.Name()
defer os.Remove(gofilename)

if err = ioutil.WriteFile(gofilename, []byte(mangleCheckCode), 0666); err != nil {
fatalf("%v", err)
}

// Compile with gccgo, capturing generated assembly.
gccgocmd := os.Getenv("GCCGO")
if gccgocmd == "" {
gpath, gerr := exec.LookPath("gccgo")
if gerr != nil {
fatalf("unable to locate gccgo: %v", gerr)
}
gccgocmd = gpath
}
cmd := exec.Command(gccgocmd, "-S", "-o", "-", gofilename)
buf, cerr := cmd.CombinedOutput()
if cerr != nil {
fatalf("%s", cerr)
}

// New mangling: expect go.l..u00e4ufer.Run
// Old mangling: expect go.l__ufer.Run
return regexp.MustCompile(`go\.l\.\.u00e4ufer\.Run`).Match(buf)
}

// gccgoPkgpathToSymbolNew converts a package path to a gccgo-style
// package symbol.
func gccgoPkgpathToSymbolNew(ppath string) string {
bsl := []byte{}
changed := false
for _, c := range []byte(ppath) {
switch {
case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z',
'0' <= c && c <= '9', c == '_':
bsl = append(bsl, c)
case c == '.':
bsl = append(bsl, ".x2e"...)
default:
changed = true
encbytes := []byte(fmt.Sprintf("..z%02x", c))
bsl = append(bsl, encbytes...)
}
}
if !changed {
return ppath
}
return string(bsl)
}

// gccgoPkgpathToSymbolOld converts a package path to a gccgo-style
// package symbol using the older mangling scheme.
func gccgoPkgpathToSymbolOld(ppath string) string {
clean := func(r rune) rune {
switch {
case 'A' <= r && r <= 'Z', 'a' <= r && r <= 'z',
'0' <= r && r <= '9':
return r
}
return '_'
}
return strings.Map(clean, ppath)
}

// gccgoPkgpathToSymbol converts a package path to a mangled packagepath
// symbol.
func gccgoPkgpathToSymbol(ppath string) string {
if gccgoUsesNewMangling() {
return gccgoPkgpathToSymbolNew(ppath)
} else {
return gccgoPkgpathToSymbolOld(ppath)
if gccgoMangler == nil {
var err error
cmd := os.Getenv("GCCGO")
if cmd == "" {
cmd, err = exec.LookPath("gccgo")
if err != nil {
fatalf("unable to locate gccgo: %v", err)
}
}
gccgoMangler, err = pkgpath.ToSymbolFunc(cmd, *objDir)
if err != nil {
fatalf("%v", err)
}
}
return gccgoMangler(ppath)
}

// Return the package prefix when using gccgo.
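A worked example of the two mangling flavors described above, applied to the package path "läufer" from mangleCheckCode. It reuses the logic of the removed helpers; note that cgo's old helper maps per rune, while gccgo's own old assembly symbols (what determineGccgoManglingScheme greps for, e.g. go.l__ufer.Run and go.l..u00e4ufer.Run) were rendered differently, so this is illustrative only.

package main

import (
	"fmt"
	"strings"
)

// oldMangle mirrors gccgoPkgpathToSymbolOld: any rune outside
// [A-Za-z0-9] becomes a single underscore.
func oldMangle(ppath string) string {
	clean := func(r rune) rune {
		switch {
		case 'A' <= r && r <= 'Z', 'a' <= r && r <= 'z', '0' <= r && r <= '9':
			return r
		}
		return '_'
	}
	return strings.Map(clean, ppath)
}

// newMangle mirrors gccgoPkgpathToSymbolNew: '.' becomes ".x2e" and
// any other non-identifier byte becomes "..z" plus its hex value.
func newMangle(ppath string) string {
	var b []byte
	for _, c := range []byte(ppath) {
		switch {
		case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z', '0' <= c && c <= '9', c == '_':
			b = append(b, c)
		case c == '.':
			b = append(b, ".x2e"...)
		default:
			b = append(b, fmt.Sprintf("..z%02x", c)...)
		}
	}
	return string(b)
}

func main() {
	// "ä" is one rune but two UTF-8 bytes (0xc3 0xa4).
	fmt.Println(oldMangle("läufer")) // l_ufer (one '_' per rune)
	fmt.Println(newMangle("läufer")) // l..zc3..za4ufer (one escape per byte)
}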

@@ -5,7 +5,9 @@
package gc

import (
"bytes"
"cmd/compile/internal/types"
"fmt"
"sort"
)

@@ -173,6 +175,91 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
return o
}

// findTypeLoop searches for an invalid type declaration loop involving
// type t and reports whether one is found. If so, path contains the
// loop.
//
// path points to a slice used for tracking the sequence of types
// visited. Using a pointer to a slice allows the slice capacity to
// grow and limit reallocations.
func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
// We implement a simple DFS loop-finding algorithm. This
// could be faster, but type cycles are rare.

if t.Sym != nil {
// Declared type. Check for loops and otherwise
// recurse on the type expression used in the type
// declaration.

for i, x := range *path {
if x == t {
*path = (*path)[i:]
return true
}
}

*path = append(*path, t)
if findTypeLoop(asNode(t.Nod).Name.Param.Ntype.Type, path) {
return true
}
*path = (*path)[:len(*path)-1]
} else {
// Anonymous type. Recurse on contained types.

switch t.Etype {
case TARRAY:
if findTypeLoop(t.Elem(), path) {
return true
}
case TSTRUCT:
for _, f := range t.Fields().Slice() {
if findTypeLoop(f.Type, path) {
return true
}
}
case TINTER:
for _, m := range t.Methods().Slice() {
if m.Type.IsInterface() { // embedded interface
if findTypeLoop(m.Type, path) {
return true
}
}
}
}
}

return false
}
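A minimal standalone sketch of the same DFS idea on a toy graph of type declarations (names only); findCycle mirrors findTypeLoop's use of *path to trim the visited sequence down to just the cycle.

package main

import "fmt"

// refs records which declared type each type's definition mentions.
var refs = map[string][]string{
	"A": {"B"}, "B": {"C"}, "C": {"A"}, // A -> B -> C -> A is an invalid loop
	"D": {"C"},
}

func findCycle(t string, path *[]string) bool {
	for i, x := range *path {
		if x == t {
			*path = (*path)[i:] // keep only the cycle itself
			return true
		}
	}
	*path = append(*path, t)
	for _, u := range refs[t] {
		if findCycle(u, path) {
			return true
		}
	}
	*path = (*path)[:len(*path)-1]
	return false
}

func main() {
	var path []string
	if findCycle("D", &path) {
		// Prints the loop starting at the first repeated node: [C A B].
		fmt.Println("invalid recursive type:", path)
	}
}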

func reportTypeLoop(t *types.Type) {
if t.Broke() {
return
}

var l []*types.Type
if !findTypeLoop(t, &l) {
Fatalf("failed to find type loop for: %v", t)
}

// Rotate loop so that the earliest type declaration is first.
i := 0
for j, t := range l[1:] {
if typePos(t).Before(typePos(l[i])) {
i = j + 1
}
}
l = append(l[i:], l[:i]...)

var msg bytes.Buffer
fmt.Fprintf(&msg, "invalid recursive type %v\n", l[0])
for _, t := range l {
fmt.Fprintf(&msg, "\t%v: %v refers to\n", linestr(typePos(t)), t)
t.SetBroke(true)
}
fmt.Fprintf(&msg, "\t%v: %v", linestr(typePos(l[0])), l[0])
yyerrorl(typePos(l[0]), msg.String())
}

// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatal.
@@ -192,11 +279,7 @@ func dowidth(t *types.Type) {
}

if t.Width == -2 {
if !t.Broke() {
t.SetBroke(true)
yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
}

reportTypeLoop(t)
t.Width = 0
t.Align = 1
return

@@ -308,10 +391,7 @@ func dowidth(t *types.Type) {
checkwidth(t.Key())

case TFORW: // should have been filled in
if !t.Broke() {
t.SetBroke(true)
yyerror("invalid recursive type %v", t)
}
reportTypeLoop(t)
w = 1 // anything will do

case TANY:
@@ -7,6 +7,7 @@ package gc
import "testing"

var globl int64
var globl32 int32

func BenchmarkLoadAdd(b *testing.B) {
x := make([]int64, 1024)

@@ -20,6 +21,18 @@ func BenchmarkLoadAdd(b *testing.B) {
}
}

// Added for ppc64 extswsli on power9
func BenchmarkExtShift(b *testing.B) {
x := make([]int32, 1024)
for i := 0; i < b.N; i++ {
var s int64
for i := range x {
s ^= int64(x[i]+32) * 8
}
globl = s
}
}

func BenchmarkModify(b *testing.B) {
a := make([]int64, 1024)
v := globl

@@ -30,6 +43,17 @@ func BenchmarkModify(b *testing.B) {
}
}

func BenchmarkMullImm(b *testing.B) {
x := make([]int32, 1024)
for i := 0; i < b.N; i++ {
var s int32
for i := range x {
s += x[i] * 100
}
globl32 = s
}
}

func BenchmarkConstModify(b *testing.B) {
a := make([]int64, 1024)
for i := 0; i < b.N; i++ {
@@ -81,11 +81,6 @@ func (p *exporter) markType(t *types.Type) {
}
}

// deltaNewFile is a magic line delta offset indicating a new file.
// We use -64 because it is rare; see issue 20080 and CL 41619.
// -64 is the smallest int that fits in a single byte as a varint.
const deltaNewFile = -64
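The "-64 is the smallest int that fits in a single byte" claim can be checked with the zigzag varint encoding from encoding/binary, assuming the export format uses the same scheme:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	buf := make([]byte, binary.MaxVarintLen64)
	// zigzag(-64) = 127, which still fits in one varint byte;
	// zigzag(-65) = 129, which needs two.
	fmt.Println(binary.PutVarint(buf, -64)) // 1
	fmt.Println(binary.PutVarint(buf, -65)) // 2
}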

// ----------------------------------------------------------------------------
// Export format

@@ -126,30 +121,6 @@ const (
aliasTag
)

// untype returns the "pseudo" untyped type for a Ctype (import/export use only).
// (we can't use a pre-initialized array because we must be sure all types are
// set up)
func untype(ctype Ctype) *types.Type {
switch ctype {
case CTINT:
return types.Idealint
case CTRUNE:
return types.Idealrune
case CTFLT:
return types.Idealfloat
case CTCPLX:
return types.Idealcomplex
case CTSTR:
return types.Idealstring
case CTBOOL:
return types.Idealbool
case CTNIL:
return types.Types[TNIL]
}
Fatalf("exporter: unknown Ctype")
return nil
}

var predecl []*types.Type // initialized lazily

func predeclared() []*types.Type {

@@ -184,13 +155,13 @@ func predeclared() []*types.Type {
types.Errortype,

// untyped types
untype(CTBOOL),
untype(CTINT),
untype(CTRUNE),
untype(CTFLT),
untype(CTCPLX),
untype(CTSTR),
untype(CTNIL),
types.UntypedBool,
types.UntypedInt,
types.UntypedRune,
types.UntypedFloat,
types.UntypedComplex,
types.UntypedString,
types.Types[TNIL],

// package unsafe
types.Types[TUNSAFEPTR],
@@ -114,16 +114,16 @@ func (v Val) Interface() interface{} {

type NilVal struct{}

// Int64 returns n as an int64.
// Int64Val returns n as an int64.
// n must be an integer or rune constant.
func (n *Node) Int64() int64 {
func (n *Node) Int64Val() int64 {
if !Isconst(n, CTINT) {
Fatalf("Int64(%v)", n)
Fatalf("Int64Val(%v)", n)
}
return n.Val().U.(*Mpint).Int64()
}

// CanInt64 reports whether it is safe to call Int64() on n.
// CanInt64 reports whether it is safe to call Int64Val() on n.
func (n *Node) CanInt64() bool {
if !Isconst(n, CTINT) {
return false

@@ -131,18 +131,27 @@ func (n *Node) CanInt64() bool {

// if the value inside n cannot be represented as an int64, the
// return value of Int64 is undefined
return n.Val().U.(*Mpint).CmpInt64(n.Int64()) == 0
return n.Val().U.(*Mpint).CmpInt64(n.Int64Val()) == 0
}

// Bool returns n as a bool.
// BoolVal returns n as a bool.
// n must be a boolean constant.
func (n *Node) Bool() bool {
func (n *Node) BoolVal() bool {
if !Isconst(n, CTBOOL) {
Fatalf("Bool(%v)", n)
Fatalf("BoolVal(%v)", n)
}
return n.Val().U.(bool)
}

// StringVal returns the value of a literal string Node as a string.
// n must be a string constant.
func (n *Node) StringVal() string {
if !Isconst(n, CTSTR) {
Fatalf("StringVal(%v)", n)
}
return n.Val().U.(string)
}

// truncate float literal fv to 32-bit or 64-bit precision
// according to type; return truncated value.
func truncfltlit(oldv *Mpflt, t *types.Type) *Mpflt {
@@ -612,7 +621,7 @@ func evconst(n *Node) {
var strs []string
i2 := i1
for i2 < len(s) && Isconst(s[i2], CTSTR) {
strs = append(strs, strlit(s[i2]))
strs = append(strs, s[i2].StringVal())
i2++
}

@@ -635,7 +644,7 @@ func evconst(n *Node) {
switch nl.Type.Etype {
case TSTRING:
if Isconst(nl, CTSTR) {
setintconst(n, int64(len(strlit(nl))))
setintconst(n, int64(len(nl.StringVal())))
}
case TARRAY:
if !hascallchan(nl) {

@@ -1019,17 +1028,17 @@ func nodlit(v Val) *Node {
func idealType(ct Ctype) *types.Type {
switch ct {
case CTSTR:
return types.Idealstring
return types.UntypedString
case CTBOOL:
return types.Idealbool
return types.UntypedBool
case CTINT:
return types.Idealint
return types.UntypedInt
case CTRUNE:
return types.Idealrune
return types.UntypedRune
case CTFLT:
return types.Idealfloat
return types.UntypedFloat
case CTCPLX:
return types.Idealcomplex
return types.UntypedComplex
case CTNIL:
return types.Types[TNIL]
}
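The user-visible counterpart of these untyped kinds is constant defaulting: an untyped constant only takes a concrete ("default") type when it is bound to a variable or interface. A short sketch:

package main

import "fmt"

func main() {
	const c = 'x' + 0.5   // untyped float constant (rune + float arithmetic)
	var i interface{} = c // defaults to float64, per defaultType above
	// Prints: float64 int int32
	fmt.Printf("%T %T %T\n", i, interface{}(1), interface{}('x'))
}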

@@ -1080,17 +1089,17 @@ func defaultlit2(l *Node, r *Node, force bool) (*Node, *Node) {

func ctype(t *types.Type) Ctype {
switch t {
case types.Idealbool:
case types.UntypedBool:
return CTBOOL
case types.Idealstring:
case types.UntypedString:
return CTSTR
case types.Idealint:
case types.UntypedInt:
return CTINT
case types.Idealrune:
case types.UntypedRune:
return CTRUNE
case types.Idealfloat:
case types.UntypedFloat:
return CTFLT
case types.Idealcomplex:
case types.UntypedComplex:
return CTCPLX
}
Fatalf("bad type %v", t)

@@ -1111,17 +1120,17 @@ func defaultType(t *types.Type) *types.Type {
}

switch t {
case types.Idealbool:
case types.UntypedBool:
return types.Types[TBOOL]
case types.Idealstring:
case types.UntypedString:
return types.Types[TSTRING]
case types.Idealint:
case types.UntypedInt:
return types.Types[TINT]
case types.Idealrune:
case types.UntypedRune:
return types.Runetype
case types.Idealfloat:
case types.UntypedFloat:
return types.Types[TFLOAT64]
case types.Idealcomplex:
case types.UntypedComplex:
return types.Types[TCOMPLEX128]
}

@@ -1129,12 +1138,6 @@ func defaultType(t *types.Type) *types.Type {
return nil
}

// strlit returns the value of a literal string Node as a string.
func strlit(n *Node) string {
return n.Val().U.(string)
}

// TODO(gri) smallintconst is only used in one place - can we used indexconst?
func smallintconst(n *Node) bool {
if n.Op == OLITERAL && Isconst(n, CTINT) && n.Type != nil {
switch simtype[n.Type.Etype] {
@@ -34,7 +34,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {

// Walk progs to build up the InlCalls data structure
var prevpos src.XPos
for p := fnsym.Func.Text; p != nil; p = p.Link {
for p := fnsym.Func().Text; p != nil; p = p.Link {
if p.Pos == prevpos {
continue
}

@@ -150,7 +150,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
start := int64(-1)
curii := -1
var prevp *obj.Prog
for p := fnsym.Func.Text; p != nil; prevp, p = p, p.Link {
for p := fnsym.Func().Text; p != nil; prevp, p = p, p.Link {
if prevp != nil && p.Pos == prevp.Pos {
continue
}
@@ -169,36 +169,47 @@ func mayAffectMemory(n *Node) bool {
}
}

func mustHeapAlloc(n *Node) bool {
// heapAllocReason returns the reason the given Node must be heap
// allocated, or the empty string if it doesn't.
func heapAllocReason(n *Node) string {
if n.Type == nil {
return false
return ""
}

// Parameters are always passed via the stack.
if n.Op == ONAME && (n.Class() == PPARAM || n.Class() == PPARAMOUT) {
return false
return ""
}

if n.Type.Width > maxStackVarSize {
return true
return "too large for stack"
}

if (n.Op == ONEW || n.Op == OPTRLIT) && n.Type.Elem().Width >= maxImplicitStackVarSize {
return true
return "too large for stack"
}

if n.Op == OCLOSURE && closureType(n).Size() >= maxImplicitStackVarSize {
return true
return "too large for stack"
}
if n.Op == OCALLPART && partialCallType(n).Size() >= maxImplicitStackVarSize {
return true
return "too large for stack"
}

if n.Op == OMAKESLICE && !isSmallMakeSlice(n) {
return true
if n.Op == OMAKESLICE {
r := n.Right
if r == nil {
r = n.Left
}
if !smallintconst(r) {
return "non-constant size"
}
if t := n.Type; t.Elem().Width != 0 && r.Int64Val() >= maxImplicitStackVarSize/t.Elem().Width {
return "too large for stack"
}
}

return false
return ""
}
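A worked example of the make([]T, n) bound above. The threshold value is an assumption here (64 KiB is used purely for illustration); the real limit is whatever maxImplicitStackVarSize is set to in the compiler.

package main

import "fmt"

func main() {
	const maxImplicitStackVarSize = 64 * 1024 // assumed value, for illustration
	const elemWidth = 8                       // sizeof(int64)
	for _, n := range []int64{8191, 8192} {
		// Mirrors the n >= max/width check in heapAllocReason.
		heap := n >= maxImplicitStackVarSize/elemWidth
		fmt.Printf("make([]int64, %d): heap=%v\n", n, heap)
	}
}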

// addrescapes tags node n as having had its address taken

@@ -771,10 +771,11 @@ func (e *Escape) call(ks []EscHole, call, where *Node) {
var fn *Node
switch call.Op {
case OCALLFUNC:
if call.Left.Op == ONAME && call.Left.Class() == PFUNC {
fn = call.Left
} else if call.Left.Op == OCLOSURE {
fn = call.Left.Func.Closure.Func.Nname
switch v := staticValue(call.Left); {
case v.Op == ONAME && v.Class() == PFUNC:
fn = v
case v.Op == OCLOSURE:
fn = v.Func.Closure.Func.Nname
}
case OCALLMETH:
fn = asNode(call.Left.Type.FuncType().Nname)

@@ -1051,11 +1052,7 @@ func (e *Escape) newLoc(n *Node, transient bool) *EscLocation {
}
n.SetOpt(loc)

if mustHeapAlloc(n) {
why := "too large for stack"
if n.Op == OMAKESLICE && (!Isconst(n.Left, CTINT) || !Isconst(n.Right, CTINT)) {
why = "non-constant size"
}
if why := heapAllocReason(n); why != "" {
e.flow(e.heapHole().addr(n, why), loc)
}
}
|
@ -96,7 +96,7 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
|
|||
return n
|
||||
}
|
||||
|
||||
// pkgtype returns the named type declared by symbol s.
|
||||
// importtype returns the named type declared by symbol s.
|
||||
// If no such type has been declared yet, a forward declaration is returned.
|
||||
// ipkg is the package being imported
|
||||
func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
|
||||
|
|
|
|||
|
|
@@ -6,17 +6,9 @@ package gc

import (
"math"
"os"
"runtime"
"testing"
)

// For GO386=387, make sure fucomi* opcodes are not used
// for comparison operations.
// Note that this test will fail only on a Pentium MMX
// processor (with GOARCH=386 GO386=387), as it just runs
// some code and looks for an unimplemented instruction fault.

//go:noinline
func compare1(a, b float64) bool {
return a < b

@@ -137,9 +129,6 @@ func TestFloatCompareFolded(t *testing.T) {
}
}

// For GO386=387, make sure fucomi* opcodes are not used
// for float->int conversions.

//go:noinline
func cvt1(a float64) uint64 {
return uint64(a)

@@ -370,14 +359,6 @@ func TestFloat32StoreToLoadConstantFold(t *testing.T) {
// are not converted to quiet NaN (qNaN) values during compilation.
// See issue #27193 for more information.

// TODO: this method for detecting 387 won't work if the compiler has been
// built using GOARCH=386 GO386=387 and either the target is a different
// architecture or the GO386=387 environment variable is not set when the
// test is run.
if runtime.GOARCH == "386" && os.Getenv("GO386") == "387" {
t.Skip("signaling NaNs are not propagated on 387 (issue #27516)")
}

// signaling NaNs
{
const nan = uint32(0x7f800001) // sNaN
@@ -773,17 +773,17 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
if int(t.Etype) < len(basicnames) && basicnames[t.Etype] != "" {
var name string
switch t {
case types.Idealbool:
case types.UntypedBool:
name = "untyped bool"
case types.Idealstring:
case types.UntypedString:
name = "untyped string"
case types.Idealint:
case types.UntypedInt:
name = "untyped int"
case types.Idealrune:
case types.UntypedRune:
name = "untyped rune"
case types.Idealfloat:
case types.UntypedFloat:
name = "untyped float"
case types.Idealcomplex:
case types.UntypedComplex:
name = "untyped complex"
default:
name = basicnames[t.Etype]

@@ -1333,7 +1333,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
n.Orig.exprfmt(s, prec, mode)
return
}
if n.Type != nil && n.Type.Etype != TIDEAL && n.Type.Etype != TNIL && n.Type != types.Idealbool && n.Type != types.Idealstring {
if n.Type != nil && !n.Type.IsUntyped() {
// Need parens when type begins with what might
// be misinterpreted as a unary operator: * or <-.
if n.Type.IsPtr() || (n.Type.IsChan() && n.Type.ChanDir() == types.Crecv) {
@@ -259,7 +259,6 @@ type Arch struct {

REGSP int
MAXWIDTH int64
Use387 bool // should 386 backend use 387 FP instructions instead of sse2.
SoftFloat bool

PadFrame func(int64) int64

@@ -328,10 +327,6 @@ var (
BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym

// GO386=387
ControlWord64trunc,
ControlWord32 *obj.LSym

// Wasm
WasmMove,
WasmZero,
@@ -199,7 +199,7 @@ func (pp *Progs) settext(fn *Node) {
ptxt := pp.Prog(obj.ATEXT)
pp.Text = ptxt

fn.Func.lsym.Func.Text = ptxt
fn.Func.lsym.Func().Text = ptxt
ptxt.From.Type = obj.TYPE_MEM
ptxt.From.Name = obj.NAME_EXTERN
ptxt.From.Sym = fn.Func.lsym
@@ -751,11 +751,11 @@ func (w *exportWriter) param(f *types.Field) {

func constTypeOf(typ *types.Type) Ctype {
switch typ {
case types.Idealint, types.Idealrune:
case types.UntypedInt, types.UntypedRune:
return CTINT
case types.Idealfloat:
case types.UntypedFloat:
return CTFLT
case types.Idealcomplex:
case types.UntypedComplex:
return CTCPLX
}

@@ -780,8 +780,8 @@ func constTypeOf(typ *types.Type) Ctype {
}

func (w *exportWriter) value(typ *types.Type, v Val) {
if typ.IsUntyped() {
typ = untype(v.Ctype())
if vt := idealType(v.Ctype()); typ.IsUntyped() && typ != vt {
Fatalf("exporter: untyped type mismatch, have: %v, want: %v", typ, vt)
}
w.typ(typ)

@@ -1017,6 +1017,8 @@ func (w *exportWriter) symIdx(s *types.Sym) {
}

func (w *exportWriter) typeExt(t *types.Type) {
// Export whether this type is marked notinheap.
w.bool(t.NotInHeap())
// For type T, export the index of type descriptor symbols of T and *T.
if i, ok := typeSymIdx[t]; ok {
w.int64(i[0])
@@ -375,7 +375,7 @@ func (p *importReader) value() (typ *types.Type, v Val) {
v.U = p.string()
case CTINT:
x := new(Mpint)
x.Rune = typ == types.Idealrune
x.Rune = typ == types.UntypedRune
p.mpint(&x.Val, typ)
v.U = x
case CTFLT:

@@ -596,7 +596,6 @@ func (r *importReader) typ1() *types.Type {

// Ensure we expand the interface in the frontend (#25055).
checkwidth(t)

return t
}
}

@@ -711,6 +710,7 @@ func (r *importReader) symIdx(s *types.Sym) {
}

func (r *importReader) typeExt(t *types.Type) {
t.SetNotInHeap(r.bool())
i, pi := r.int64(), r.int64()
if i != -1 && pi != -1 {
typeSymIdx[t] = [2]int64{i, pi}
@@ -325,18 +325,10 @@ func (v *hairyVisitor) visit(n *Node) bool {
 			break
 		}

-		if fn := n.Left.Func; fn != nil && fn.Inl != nil {
-			v.budget -= fn.Inl.Cost
+		if fn := inlCallee(n.Left); fn != nil && fn.Func.Inl != nil {
+			v.budget -= fn.Func.Inl.Cost
 			break
 		}
-		if n.Left.isMethodExpression() {
-			if d := asNode(n.Left.Sym.Def); d != nil && d.Func.Inl != nil {
-				v.budget -= d.Func.Inl.Cost
-				break
-			}
-		}
-		// TODO(mdempsky): Budget for OCLOSURE calls if we
-		// ever allow that. See #15561 and #23093.

 		// Call cost for non-leaf inlining.
 		v.budget -= v.extraCallCost
@@ -385,14 +377,11 @@ func (v *hairyVisitor) visit(n *Node) bool {
 	case OCLOSURE,
 		OCALLPART,
 		ORANGE,
 		OFOR,
 		OFORUNTIL,
 		OSELECT,
 		OTYPESW,
 		OGO,
 		ODEFER,
 		ODCLTYPE, // can't print yet
-		OBREAK,
 		ORETJMP:
 		v.reason = "unhandled op " + n.Op.String()
 		return true
@@ -400,10 +389,23 @@ func (v *hairyVisitor) visit(n *Node) bool {
 	case OAPPEND:
 		v.budget -= inlineExtraAppendCost

-	case ODCLCONST, OEMPTY, OFALL, OLABEL:
+	case ODCLCONST, OEMPTY, OFALL:
 		// These nodes don't produce code; omit from inlining budget.
 		return false

+	case OLABEL:
+		// TODO(mdempsky): Add support for inlining labeled control statements.
+		if n.labeledControl() != nil {
+			v.reason = "labeled control"
+			return true
+		}
+
+	case OBREAK, OCONTINUE:
+		if n.Sym != nil {
+			// Should have short-circuited due to labeledControl above.
+			Fatalf("unexpected labeled break/continue: %v", n)
+		}
+
 	case OIF:
 		if Isconst(n.Left, CTBOOL) {
 			// This if and the condition cost nothing.
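Read together, the two new cases draw a line between labels on control statements and bare break/continue: only the former still block inlining. A hedged, user-level sketch of the distinction (function names are illustrative, and whether a given function actually gets inlined still depends on the rest of the budget):

package main

// labeledLoop attaches a label to a for statement; n.labeledControl()
// is non-nil for such labels, so the visitor reports "labeled control".
func labeledLoop(xs []int) int {
	sum := 0
loop:
	for _, x := range xs {
		if x < 0 {
			break loop // labeled break targets the labeled statement
		}
		sum += x
	}
	return sum
}

// bareBreak uses an unlabeled break (n.Sym == nil), which is exactly
// the case the new OBREAK/OCONTINUE arm accepts without extra cost.
func bareBreak(x int) string {
	switch {
	case x > 0:
		break
	}
	return "done"
}

func main() {
	println(labeledLoop([]int{1, 2, -1}), bareBreak(1))
}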
@@ -669,53 +671,11 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node {
 		if Debug['m'] > 3 {
 			fmt.Printf("%v:call to func %+v\n", n.Line(), n.Left)
 		}
-		if n.Left.Func != nil && n.Left.Func.Inl != nil && !isIntrinsicCall(n) { // normal case
-			n = mkinlcall(n, n.Left, maxCost, inlMap)
-		} else if n.Left.isMethodExpression() && asNode(n.Left.Sym.Def) != nil {
-			n = mkinlcall(n, asNode(n.Left.Sym.Def), maxCost, inlMap)
-		} else if n.Left.Op == OCLOSURE {
-			if f := inlinableClosure(n.Left); f != nil {
-				n = mkinlcall(n, f, maxCost, inlMap)
-			}
-		} else if n.Left.Op == ONAME && n.Left.Name != nil && n.Left.Name.Defn != nil {
-			if d := n.Left.Name.Defn; d.Op == OAS && d.Right.Op == OCLOSURE {
-				if f := inlinableClosure(d.Right); f != nil {
-					// NB: this check is necessary to prevent indirect re-assignment of the variable
-					// having the address taken after the invocation or only used for reads is actually fine
-					// but we have no easy way to distinguish the safe cases
-					if d.Left.Name.Addrtaken() {
-						if Debug['m'] > 1 {
-							fmt.Printf("%v: cannot inline escaping closure variable %v\n", n.Line(), n.Left)
-						}
-						if logopt.Enabled() {
-							logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", Curfn.funcname(),
-								fmt.Sprintf("%v cannot be inlined (escaping closure variable)", n.Left))
-						}
-						break
-					}
-
-					// ensure the variable is never re-assigned
-					if unsafe, a := reassigned(n.Left); unsafe {
-						if Debug['m'] > 1 {
-							if a != nil {
-								fmt.Printf("%v: cannot inline re-assigned closure variable at %v: %v\n", n.Line(), a.Line(), a)
-								if logopt.Enabled() {
-									logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", Curfn.funcname(),
-										fmt.Sprintf("%v cannot be inlined (re-assigned closure variable)", a))
-								}
-							} else {
-								fmt.Printf("%v: cannot inline global closure variable %v\n", n.Line(), n.Left)
-								if logopt.Enabled() {
-									logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", Curfn.funcname(),
-										fmt.Sprintf("%v cannot be inlined (global closure variable)", n.Left))
-								}
-							}
-						}
-						break
-					}
-					n = mkinlcall(n, f, maxCost, inlMap)
-				}
-			}
-		}
+		if isIntrinsicCall(n) {
+			break
+		}
+		if fn := inlCallee(n.Left); fn != nil && fn.Func.Inl != nil {
+			n = mkinlcall(n, fn, maxCost, inlMap)
+		}

 	case OCALLMETH:
@@ -739,16 +699,73 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node {
 	return n
 }

-// inlinableClosure takes an OCLOSURE node and follows linkage to the matching ONAME with
-// the inlinable body. Returns nil if the function is not inlinable.
-func inlinableClosure(n *Node) *Node {
-	c := n.Func.Closure
-	caninl(c)
-	f := c.Func.Nname
-	if f == nil || f.Func.Inl == nil {
+// inlCallee takes a function-typed expression and returns the underlying function ONAME
+// that it refers to if statically known. Otherwise, it returns nil.
+func inlCallee(fn *Node) *Node {
+	fn = staticValue(fn)
+	switch {
+	case fn.Op == ONAME && fn.Class() == PFUNC:
+		if fn.isMethodExpression() {
+			return asNode(fn.Sym.Def)
+		}
+		return fn
+	case fn.Op == OCLOSURE:
+		c := fn.Func.Closure
+		caninl(c)
+		return c.Func.Nname
+	}
+	return nil
+}
+
+func staticValue(n *Node) *Node {
+	for {
+		n1 := staticValue1(n)
+		if n1 == nil {
+			return n
+		}
+		n = n1
+	}
+}
+
+// staticValue1 implements a simple SSA-like optimization. If n is a local variable
+// that is initialized and never reassigned, staticValue1 returns the initializer
+// expression. Otherwise, it returns nil.
+func staticValue1(n *Node) *Node {
+	if n.Op != ONAME || n.Class() != PAUTO || n.Name.Addrtaken() {
 		return nil
 	}
-	return f
+
+	defn := n.Name.Defn
+	if defn == nil {
+		return nil
+	}
+
+	var rhs *Node
+FindRHS:
+	switch defn.Op {
+	case OAS:
+		rhs = defn.Right
+	case OAS2:
+		for i, lhs := range defn.List.Slice() {
+			if lhs == n {
+				rhs = defn.Rlist.Index(i)
+				break FindRHS
+			}
+		}
+		Fatalf("%v missing from LHS of %v", n, defn)
+	default:
+		return nil
+	}
+	if rhs == nil {
+		Fatalf("RHS is nil: %v", defn)
+	}
+
+	unsafe, _ := reassigned(n)
+	if unsafe {
+		return nil
+	}
+
+	return rhs
 }

 // reassigned takes an ONAME node, walks the function in which it is defined, and returns a boolean
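The staticValue walk means a call through a function-valued local can be traced back to its initializer whenever the variable is never reassigned and its address is never taken. A minimal sketch of user code that this lookup can now resolve to a static callee (names are illustrative):

package main

func add(a, b int) int { return a + b }

func main() {
	// f and g are PAUTO ONAMEs: initialized once, never reassigned,
	// addresses never taken, so staticValue can follow them to the
	// function name and the closure literal respectively.
	f := add
	g := func(x int) int { return x * 2 }
	println(f(1, 2), g(21))
}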
@@ -831,16 +848,19 @@ func (v *reassignVisitor) visitList(l Nodes) *Node {
 	return nil
 }

-func tinlvar(t *types.Field, inlvars map[*Node]*Node) *Node {
-	if n := asNode(t.Nname); n != nil && !n.isBlank() {
-		inlvar := inlvars[n]
-		if inlvar == nil {
-			Fatalf("missing inlvar for %v\n", n)
-		}
-		return inlvar
+func inlParam(t *types.Field, as *Node, inlvars map[*Node]*Node) *Node {
+	n := asNode(t.Nname)
+	if n == nil || n.isBlank() {
+		return nblank
 	}

-	return typecheck(nblank, ctxExpr|ctxAssign)
+	inlvar := inlvars[n]
+	if inlvar == nil {
+		Fatalf("missing inlvar for %v", n)
+	}
+	as.Ninit.Append(nod(ODCL, inlvar, nil))
+	inlvar.Name.Defn = as
+	return inlvar
 }

 var inlgen int
@@ -970,14 +990,15 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node {
 			continue
 		}
 		if ln.isParamStackCopy() { // ignore the on-stack copy of a parameter that moved to the heap
-			continue
-		}
-		inlvars[ln] = typecheck(inlvar(ln), ctxExpr)
-		if ln.Class() == PPARAM || ln.Name.Param.Stackcopy != nil && ln.Name.Param.Stackcopy.Class() == PPARAM {
-			ninit.Append(nod(ODCL, inlvars[ln], nil))
+			// TODO(mdempsky): Remove once I'm confident
+			// this never actually happens. We currently
+			// perform inlining before escape analysis, so
+			// nothing should have moved to the heap yet.
+			Fatalf("impossible: %v", ln)
 		}
+		inlf := typecheck(inlvar(ln), ctxExpr)
+		inlvars[ln] = inlf
 		if genDwarfInline > 0 {
-			inlf := inlvars[ln]
 			if ln.Class() == PPARAM {
 				inlf.Name.SetInlFormal(true)
 			} else {
@@ -1019,56 +1040,42 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node {

 	// Assign arguments to the parameters' temp names.
 	as := nod(OAS2, nil, nil)
-	as.Rlist.Set(n.List.Slice())
 	as.SetColas(true)
+	if n.Op == OCALLMETH {
+		if n.Left.Left == nil {
+			Fatalf("method call without receiver: %+v", n)
+		}
+		as.Rlist.Append(n.Left.Left)
+	}
+	as.Rlist.Append(n.List.Slice()...)

 	// For non-dotted calls to variadic functions, we assign the
 	// variadic parameter's temp name separately.
 	var vas *Node

-	if fn.IsMethod() {
-		rcv := fn.Type.Recv()
-
-		if n.Left.Op == ODOTMETH {
-			// For x.M(...), assign x directly to the
-			// receiver parameter.
-			if n.Left.Left == nil {
-				Fatalf("method call without receiver: %+v", n)
-			}
-			ras := nod(OAS, tinlvar(rcv, inlvars), n.Left.Left)
-			ras = typecheck(ras, ctxStmt)
-			ninit.Append(ras)
-		} else {
-			// For T.M(...), add the receiver parameter to
-			// as.List, so it's assigned by the normal
-			// arguments.
-			if as.Rlist.Len() == 0 {
-				Fatalf("non-method call to method without first arg: %+v", n)
-			}
-			as.List.Append(tinlvar(rcv, inlvars))
-		}
+	if recv := fn.Type.Recv(); recv != nil {
+		as.List.Append(inlParam(recv, as, inlvars))
 	}

 	for _, param := range fn.Type.Params().Fields().Slice() {
 		// For ordinary parameters or variadic parameters in
 		// dotted calls, just add the variable to the
 		// assignment list, and we're done.
 		if !param.IsDDD() || n.IsDDD() {
-			as.List.Append(tinlvar(param, inlvars))
+			as.List.Append(inlParam(param, as, inlvars))
 			continue
 		}

 		// Otherwise, we need to collect the remaining values
 		// to pass as a slice.
-
-		numvals := n.List.Len()
-
 		x := as.List.Len()
-		for as.List.Len() < numvals {
+		for as.List.Len() < as.Rlist.Len() {
 			as.List.Append(argvar(param.Type, as.List.Len()))
 		}
 		varargs := as.List.Slice()[x:]

-		vas = nod(OAS, tinlvar(param, inlvars), nil)
+		vas = nod(OAS, nil, nil)
+		vas.Left = inlParam(param, vas, inlvars)
 		if len(varargs) == 0 {
 			vas.Right = nodnil()
 			vas.Right.Type = param.Type
@@ -83,7 +83,7 @@ func TestIntendedInlining(t *testing.T) {
 		"puintptr.ptr",
 		"spanOf",
 		"spanOfUnchecked",
-		//"(*gcWork).putFast", // TODO(austin): For debugging #27993
+		"(*gcWork).putFast",
 		"(*gcWork).tryGetFast",
 		"(*guintptr).set",
 		"(*markBits).advance",
@@ -115,6 +115,7 @@ func TestIntendedInlining(t *testing.T) {
 		"byLiteral.Len",
 		"byLiteral.Less",
 		"byLiteral.Swap",
+		"(*dictDecoder).tryWriteCopy",
 	},
 	"encoding/base64": {
 		"assemble32",
@@ -48,8 +48,11 @@ const (
 	Nowritebarrierrec  // error on write barrier in this or recursive callees
 	Yeswritebarrierrec // cancels Nowritebarrierrec in this function and callees

-	// Runtime-only type pragmas
+	// Runtime and cgo type pragmas
 	NotInHeap // values of this type must not be heap allocated
+
+	// Go command pragmas
+	GoBuildPragma
 )

 const (
@@ -71,6 +74,8 @@ const (

 func pragmaFlag(verb string) PragmaFlag {
 	switch verb {
+	case "go:build":
+		return GoBuildPragma
 	case "go:nointerface":
 		if objabi.Fieldtrack_enabled != 0 {
 			return Nointerface
@@ -516,6 +516,7 @@ func Main(archInit func(*Arch)) {
 	}

 	ssaDump = os.Getenv("GOSSAFUNC")
+	ssaDir = os.Getenv("GOSSADIR")
 	if ssaDump != "" {
 		if strings.HasSuffix(ssaDump, "+") {
 			ssaDump = ssaDump[:len(ssaDump)-1]
@@ -967,9 +968,10 @@ func readSymABIs(file, myimportpath string) {
 			if len(parts) != 3 {
 				log.Fatalf(`%s:%d: invalid symabi: syntax is "%s sym abi"`, file, lineNum, parts[0])
 			}
-			sym, abi := parts[1], parts[2]
-			if abi != "ABI0" { // Only supported external ABI right now
-				log.Fatalf(`%s:%d: invalid symabi: unknown abi "%s"`, file, lineNum, abi)
+			sym, abistr := parts[1], parts[2]
+			abi, valid := obj.ParseABI(abistr)
+			if !valid {
+				log.Fatalf(`%s:%d: invalid symabi: unknown abi "%s"`, file, lineNum, abistr)
 			}

 			// If the symbol is already prefixed with
@@ -982,9 +984,9 @@ func readSymABIs(file, myimportpath string) {

 			// Record for later.
 			if parts[0] == "def" {
-				symabiDefs[sym] = obj.ABI0
+				symabiDefs[sym] = abi
 			} else {
-				symabiRefs[sym] = obj.ABI0
+				symabiRefs[sym] = abi
 			}
 		default:
 			log.Fatalf(`%s:%d: invalid symabi type "%s"`, file, lineNum, parts[0])
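readSymABIs now delegates ABI-name parsing to obj.ParseABI instead of accepting only "ABI0". A self-contained sketch of the same parsing shape — the helper below merely mimics obj.ParseABI (whose real table lives in cmd/internal/obj), and the symabi line is made up:

package main

import (
	"fmt"
	"strings"
)

// parseABI mimics obj.ParseABI: map an ABI name to an identifier and
// report whether the name is known. Illustrative only.
func parseABI(abistr string) (int, bool) {
	switch abistr {
	case "ABI0":
		return 0, true
	case "ABIInternal":
		return 1, true
	}
	return 0, false
}

func main() {
	line := "def runtime.morestack ABI0" // hypothetical symabi line
	parts := strings.Fields(line)
	abi, valid := parseABI(parts[2])
	fmt.Println(parts[0], parts[1], abi, valid)
}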
@@ -242,6 +242,7 @@ func (p *noder) node() {
 	mkpackage(p.file.PkgName.Value)

 	if pragma, ok := p.file.Pragma.(*Pragma); ok {
+		pragma.Flag &^= GoBuildPragma
 		p.checkUnused(pragma)
 	}

@@ -773,7 +774,7 @@ func (p *noder) sum(x syntax.Expr) *Node {
 		n := p.expr(x)
 		if Isconst(n, CTSTR) && n.Sym == nil {
 			nstr = n
-			chunks = append(chunks, strlit(nstr))
+			chunks = append(chunks, nstr.StringVal())
 		}

 		for i := len(adds) - 1; i >= 0; i-- {
@@ -783,12 +784,12 @@ func (p *noder) sum(x syntax.Expr) *Node {
 		if Isconst(r, CTSTR) && r.Sym == nil {
 			if nstr != nil {
 				// Collapse r into nstr instead of adding to n.
-				chunks = append(chunks, strlit(r))
+				chunks = append(chunks, r.StringVal())
 				continue
 			}

 			nstr = r
-			chunks = append(chunks, strlit(nstr))
+			chunks = append(chunks, nstr.StringVal())
 		} else {
 			if len(chunks) > 1 {
 				nstr.SetVal(Val{U: strings.Join(chunks, "")})
@@ -272,7 +272,7 @@ func dumpGlobalConst(n *Node) {
 	default:
 		return
 	}
-	Ctxt.DwarfIntConst(myimportpath, n.Sym.Name, typesymname(t), n.Int64())
+	Ctxt.DwarfIntConst(myimportpath, n.Sym.Name, typesymname(t), n.Int64Val())
 }

 func dumpglobls() {
@@ -305,20 +305,21 @@ func dumpglobls() {
 // global symbols can't be declared during parallel compilation.
 func addGCLocals() {
 	for _, s := range Ctxt.Text {
-		if s.Func == nil {
+		fn := s.Func()
+		if fn == nil {
 			continue
 		}
-		for _, gcsym := range []*obj.LSym{s.Func.GCArgs, s.Func.GCLocals, s.Func.GCRegs} {
+		for _, gcsym := range []*obj.LSym{fn.GCArgs, fn.GCLocals, fn.GCRegs} {
 			if gcsym != nil && !gcsym.OnList() {
 				ggloblsym(gcsym, int32(len(gcsym.P)), obj.RODATA|obj.DUPOK)
 			}
 		}
-		if x := s.Func.StackObjects; x != nil {
+		if x := fn.StackObjects; x != nil {
 			attr := int16(obj.RODATA)
 			ggloblsym(x, int32(len(x.P)), attr)
 			x.Set(obj.AttrStatic, true)
 		}
-		if x := s.Func.OpenCodedDeferInfo; x != nil {
+		if x := fn.OpenCodedDeferInfo; x != nil {
 			ggloblsym(x, int32(len(x.P)), obj.RODATA|obj.DUPOK)
 		}
 	}
@@ -1102,7 +1102,7 @@ func (o *Order) expr(n, lhs *Node) *Node {
 		haslit := false
 		for _, n1 := range n.List.Slice() {
 			hasbyte = hasbyte || n1.Op == OBYTES2STR
-			haslit = haslit || n1.Op == OLITERAL && len(strlit(n1)) != 0
+			haslit = haslit || n1.Op == OLITERAL && len(n1.StringVal()) != 0
 		}

 		if haslit && hasbyte {
@@ -1274,7 +1274,7 @@ func (o *Order) expr(n, lhs *Node) *Node {
 		var t *types.Type
 		switch n.Op {
 		case OSLICELIT:
-			t = types.NewArray(n.Type.Elem(), n.Right.Int64())
+			t = types.NewArray(n.Type.Elem(), n.Right.Int64Val())
 		case OCALLPART:
 			t = partialCallType(n)
 		}
@@ -231,6 +231,11 @@ func compile(fn *Node) {
 		return
 	}

+	// Set up the function's LSym early to avoid data races with the assemblers.
+	// Do this before walk, as walk needs the LSym to set attributes/relocations
+	// (e.g. in markTypeUsedInInterface).
+	fn.Func.initLSym(true)
+
 	walk(fn)
 	if nerrors != 0 {
 		return
@@ -250,9 +255,6 @@ func compile(fn *Node) {
 		return
 	}

-	// Set up the function's LSym early to avoid data races with the assemblers.
-	fn.Func.initLSym(true)
-
 	// Make sure type syms are declared for all types that might
 	// be types of stack objects. We need to do this here
 	// because symbols must be allocated before the parallel
@@ -264,8 +266,8 @@ func compile(fn *Node) {
 			dtypesym(n.Type)
 			// Also make sure we allocate a linker symbol
 			// for the stack object data, for the same reason.
-			if fn.Func.lsym.Func.StackObjects == nil {
-				fn.Func.lsym.Func.StackObjects = Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
+			if fn.Func.lsym.Func().StackObjects == nil {
+				fn.Func.lsym.Func().StackObjects = Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
 			}
 		}
 	}
@@ -413,7 +415,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
 		case PAUTO:
 			if !n.Name.Used() {
 				// Text == nil -> generating abstract function
-				if fnsym.Func.Text != nil {
+				if fnsym.Func().Text != nil {
 					Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
 				}
 				continue
@@ -423,7 +425,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
 			continue
 		}
 		apdecls = append(apdecls, n)
-		fnsym.Func.RecordAutoType(ngotype(n).Linksym())
+		fnsym.Func().RecordAutoType(ngotype(n).Linksym())
 	}

 	decls, dwarfVars := createDwarfVars(fnsym, fn.Func, apdecls)
@@ -433,7 +435,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
 	// the function symbol to insure that the type included in DWARF
 	// processing during linking.
 	typesyms := []*obj.LSym{}
-	for t, _ := range fnsym.Func.Autot {
+	for t, _ := range fnsym.Func().Autot {
 		typesyms = append(typesyms, t)
 	}
 	sort.Sort(obj.BySymName(typesyms))
@@ -442,7 +444,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
 		r.Sym = sym
 		r.Type = objabi.R_USETYPE
 	}
-	fnsym.Func.Autot = nil
+	fnsym.Func().Autot = nil

 	var varScopes []ScopeID
 	for _, decl := range decls {
@@ -520,7 +522,7 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
 	}

 	typename := dwarf.InfoPrefix + typesymname(n.Type)
-	delete(fnsym.Func.Autot, ngotype(n).Linksym())
+	delete(fnsym.Func().Autot, ngotype(n).Linksym())
 	inlIndex := 0
 	if genDwarfInline > 1 {
 		if n.Name.InlFormal() || n.Name.InlLocal() {
@@ -665,7 +667,7 @@ func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dw
 			ChildIndex: -1,
 		})
 		// Record go type of to insure that it gets emitted by the linker.
-		fnsym.Func.RecordAutoType(ngotype(n).Linksym())
+		fnsym.Func().RecordAutoType(ngotype(n).Linksym())
 	}

 	return decls, vars
@@ -729,7 +731,7 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
 	}

 	gotype := ngotype(n).Linksym()
-	delete(fnsym.Func.Autot, gotype)
+	delete(fnsym.Func().Autot, gotype)
 	typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
 	inlIndex := 0
 	if genDwarfInline > 1 {
@@ -1552,26 +1552,27 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {

 	// Emit the live pointer map data structures
 	ls := e.curfn.Func.lsym
-	ls.Func.GCArgs, ls.Func.GCLocals, ls.Func.GCRegs = lv.emit()
+	fninfo := ls.Func()
+	fninfo.GCArgs, fninfo.GCLocals, fninfo.GCRegs = lv.emit()

 	p := pp.Prog(obj.AFUNCDATA)
 	Addrconst(&p.From, objabi.FUNCDATA_ArgsPointerMaps)
 	p.To.Type = obj.TYPE_MEM
 	p.To.Name = obj.NAME_EXTERN
-	p.To.Sym = ls.Func.GCArgs
+	p.To.Sym = fninfo.GCArgs

 	p = pp.Prog(obj.AFUNCDATA)
 	Addrconst(&p.From, objabi.FUNCDATA_LocalsPointerMaps)
 	p.To.Type = obj.TYPE_MEM
 	p.To.Name = obj.NAME_EXTERN
-	p.To.Sym = ls.Func.GCLocals
+	p.To.Sym = fninfo.GCLocals

 	if !go115ReduceLiveness {
 		p = pp.Prog(obj.AFUNCDATA)
 		Addrconst(&p.From, objabi.FUNCDATA_RegPointerMaps)
 		p.To.Type = obj.TYPE_MEM
 		p.To.Name = obj.NAME_EXTERN
-		p.To.Sym = ls.Func.GCRegs
+		p.To.Sym = fninfo.GCRegs
 	}

 	return lv.livenessMap
@@ -112,12 +112,13 @@ func typecheckrangeExpr(n *Node) {
 		v2 = nil
 	}

-	var why string
 	if v1 != nil {
 		if v1.Name != nil && v1.Name.Defn == n {
 			v1.Type = t1
-		} else if v1.Type != nil && assignop(t1, v1.Type, &why) == 0 {
-			yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
+		} else if v1.Type != nil {
+			if op, why := assignop(t1, v1.Type); op == OXXX {
+				yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
+			}
 		}
 		checkassign(n, v1)
 	}
@@ -125,8 +126,10 @@ func typecheckrangeExpr(n *Node) {
 	if v2 != nil {
 		if v2.Name != nil && v2.Name.Defn == n {
 			v2.Type = t2
-		} else if v2.Type != nil && assignop(t2, v2.Type, &why) == 0 {
-			yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
+		} else if v2.Type != nil {
+			if op, why := assignop(t2, v2.Type); op == OXXX {
+				yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
+			}
 		}
 		checkassign(n, v2)
 	}
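For reference, the kind of program this range check rejects — the compiler reports an error of the form produced by the yyerrorl call above (exact wording can differ by version):

package main

func main() {
	var s string
	// error: cannot assign type int to s (type string) in range
	for _, s = range []int{1, 2, 3} {
		_ = s
	}
}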
@@ -61,8 +61,9 @@ const (
 	MAXELEMSIZE = 128
 )

-func structfieldSize() int { return 3 * Widthptr } // Sizeof(runtime.structfield{})
-func imethodSize() int     { return 4 + 4 }        // Sizeof(runtime.imethod{})
+func structfieldSize() int { return 3 * Widthptr }       // Sizeof(runtime.structfield{})
+func imethodSize() int     { return 4 + 4 }              // Sizeof(runtime.imethod{})
+func commonSize() int      { return 4*Widthptr + 8 + 8 } // Sizeof(runtime._type{})

 func uncommonSize(t *types.Type) int { // Sizeof(runtime.uncommontype{})
 	if t.Sym == nil && len(methods(t)) == 0 {
@@ -510,6 +511,7 @@ func dimportpath(p *types.Pkg) {
 	s := Ctxt.Lookup("type..importpath." + p.Prefix + ".")
 	ot := dnameData(s, 0, str, "", nil, false)
 	ggloblsym(s, int32(ot), obj.DUPOK|obj.RODATA)
+	s.Set(obj.AttrContentAddressable, true)
 	p.Pathsym = s
 }

@@ -637,6 +639,7 @@ func dname(name, tag string, pkg *types.Pkg, exported bool) *obj.LSym {
 	}
 	ot := dnameData(s, 0, name, tag, pkg, exported)
 	ggloblsym(s, int32(ot), obj.DUPOK|obj.RODATA)
+	s.Set(obj.AttrContentAddressable, true)
 	return s
 }

@@ -1272,8 +1275,9 @@ func dtypesym(t *types.Type) *obj.LSym {
 		}
 		ot = dgopkgpath(lsym, ot, tpkg)

+		xcount := sort.Search(n, func(i int) bool { return !types.IsExported(m[i].name.Name) })
 		ot = dsymptr(lsym, ot, lsym, ot+3*Widthptr+uncommonSize(t))
-		ot = duintptr(lsym, ot, uint64(n))
+		ot = duintptr(lsym, ot, uint64(xcount))
 		ot = duintptr(lsym, ot, uint64(n))
 		dataAdd := imethodSize() * n
 		ot = dextratype(lsym, ot, t, dataAdd)
@@ -1449,6 +1453,20 @@ func dtypesym(t *types.Type) *obj.LSym {
 	return lsym
 }

+// ifaceMethodOffset returns the offset of the i-th method in the interface
+// type descriptor, ityp.
+func ifaceMethodOffset(ityp *types.Type, i int64) int64 {
+	// interface type descriptor layout is struct {
+	//   _type        // commonSize
+	//   pkgpath      // 1 word
+	//   []imethod    // 3 words (pointing to [...]imethod below)
+	//   uncommontype // uncommonSize
+	//   [...]imethod
+	// }
+	// The size of imethod is 8.
+	return int64(commonSize()+4*Widthptr+uncommonSize(ityp)) + i*8
+}
+
 // for each itabEntry, gather the methods on
 // the concrete type that implement the interface
 func peekitabs() {
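A quick check of the layout arithmetic, assuming a 64-bit target (Widthptr = 8) and a 16-byte uncommontype; the constants below are mirrored here for illustration rather than taken from the compiler:

package main

import "fmt"

const (
	widthPtr     = 8                  // assumed 64-bit target
	commonSize   = 4*widthPtr + 8 + 8 // per commonSize() above
	uncommonSize = 16                 // assumed Sizeof(runtime.uncommontype{})
	imethodSize  = 8                  // per the comment above
)

// ifaceMethodOffset mirrors the formula in the diff: the _type header,
// one pkgpath word plus three slice-header words, the uncommontype,
// then i 8-byte imethod entries.
func ifaceMethodOffset(i int64) int64 {
	return int64(commonSize+4*widthPtr+uncommonSize) + i*imethodSize
}

func main() {
	fmt.Println(ifaceMethodOffset(0)) // 96 under these assumptions
	fmt.Println(ifaceMethodOffset(2)) // 112
}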
@@ -62,9 +62,9 @@ func scopePCs(fnsym *obj.LSym, marks []Mark, dwarfScopes []dwarf.Scope) {
 	if len(marks) == 0 {
 		return
 	}
-	p0 := fnsym.Func.Text
+	p0 := fnsym.Func().Text
 	scope := findScope(marks, p0.Pos)
-	for p := fnsym.Func.Text; p != nil; p = p.Link {
+	for p := p0; p != nil; p = p.Link {
 		if p.Pos == p0.Pos {
 			continue
 		}
@@ -128,7 +128,7 @@ func (s *InitSchedule) staticcopy(l *Node, r *Node) bool {
 	case OSLICELIT:
 		// copy slice
 		a := s.inittemps[r]
-		slicesym(l, a, r.Right.Int64())
+		slicesym(l, a, r.Right.Int64Val())
 		return true

 	case OARRAYLIT, OSTRUCTLIT:
@@ -205,7 +205,7 @@ func (s *InitSchedule) staticassign(l *Node, r *Node) bool {

 	case OSTR2BYTES:
 		if l.Class() == PEXTERN && r.Left.Op == OLITERAL {
-			sval := strlit(r.Left)
+			sval := r.Left.StringVal()
 			slicebytes(l, sval)
 			return true
 		}
@@ -213,7 +213,7 @@ func (s *InitSchedule) staticassign(l *Node, r *Node) bool {
 	case OSLICELIT:
 		s.initplan(r)
 		// Init slice.
-		bound := r.Right.Int64()
+		bound := r.Right.Int64Val()
 		ta := types.NewArray(r.Type.Elem(), bound)
 		ta.SetNoalg(true)
 		a := staticname(ta)
@@ -278,7 +278,7 @@ func (s *InitSchedule) staticassign(l *Node, r *Node) bool {
 			return Isconst(val, CTNIL)
 		}

-		markTypeUsedInInterface(val.Type)
+		markTypeUsedInInterface(val.Type, l.Sym.Linksym())

 		var itab *Node
 		if l.Type.IsEmptyInterface() {
@@ -413,7 +413,7 @@ func getdyn(n *Node, top bool) initGenType {
 		if !top {
 			return initDynamic
 		}
-		if n.Right.Int64()/4 > int64(n.List.Len()) {
+		if n.Right.Int64Val()/4 > int64(n.List.Len()) {
 			// <25% of entries have explicit values.
 			// Very rough estimation, it takes 4 bytes of instructions
 			// to initialize 1 byte of result. So don't use a static
@@ -589,12 +589,12 @@ func isSmallSliceLit(n *Node) bool {

 	r := n.Right

-	return smallintconst(r) && (n.Type.Elem().Width == 0 || r.Int64() <= smallArrayBytes/n.Type.Elem().Width)
+	return smallintconst(r) && (n.Type.Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type.Elem().Width)
 }

 func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
 	// make an array type corresponding the number of elements we have
-	t := types.NewArray(n.Type.Elem(), n.Right.Int64())
+	t := types.NewArray(n.Type.Elem(), n.Right.Int64Val())
 	dowidth(t)

 	if ctxt == inNonInitFunction {
@@ -993,7 +993,7 @@ func oaslit(n *Node, init *Nodes) bool {

 func getlit(lit *Node) int {
 	if smallintconst(lit) {
-		return int(lit.Int64())
+		return int(lit.Int64Val())
 	}
 	return -1
 }
@@ -9,8 +9,8 @@ import (
 	"fmt"
 	"html"
 	"os"
+	"path/filepath"
 	"sort"
-	"strings"

 	"bufio"
 	"bytes"
@@ -27,6 +27,7 @@ var ssaConfig *ssa.Config
 var ssaCaches []ssa.Cache

 var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for
+var ssaDir string      // optional destination for ssa dump file
 var ssaDumpStdout bool // whether to dump to stdout
 var ssaDumpCFG string  // generate CFGs for these phases
 const ssaDumpFile = "ssa.html"
@@ -49,21 +50,16 @@ func initssaconfig() {
 	// Caching is disabled in the backend, so generating these here avoids allocations.
 	_ = types.NewPtr(types.Types[TINTER])                             // *interface{}
 	_ = types.NewPtr(types.NewPtr(types.Types[TSTRING]))              // **string
-	_ = types.NewPtr(types.NewPtr(types.Idealstring))                 // **string
 	_ = types.NewPtr(types.NewSlice(types.Types[TINTER]))             // *[]interface{}
 	_ = types.NewPtr(types.NewPtr(types.Bytetype))                    // **byte
 	_ = types.NewPtr(types.NewSlice(types.Bytetype))                  // *[]byte
 	_ = types.NewPtr(types.NewSlice(types.Types[TSTRING]))            // *[]string
-	_ = types.NewPtr(types.NewSlice(types.Idealstring))               // *[]string
 	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[TUINT8]))) // ***uint8
 	_ = types.NewPtr(types.Types[TINT16])                             // *int16
 	_ = types.NewPtr(types.Types[TINT64])                             // *int64
 	_ = types.NewPtr(types.Errortype)                                 // *error
 	types.NewPtrCacheEnabled = false
 	ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, Ctxt, Debug['N'] == 0)
-	if thearch.LinkArch.Name == "386" {
-		ssaConfig.Set387(thearch.Use387)
-	}
 	ssaConfig.SoftFloat = thearch.SoftFloat
 	ssaConfig.Race = flag_race
 	ssaCaches = make([]ssa.Cache, nBackendWorkers)
@@ -174,10 +170,6 @@ func initssaconfig() {
 		ExtendCheckFunc[ssa.BoundsSlice3CU] = sysvar("panicExtendSlice3CU")
 	}

-	// GO386=387 runtime definitions
-	ControlWord64trunc = sysvar("controlWord64trunc") // uint16
-	ControlWord32 = sysvar("controlWord32")           // uint16
-
 	// Wasm (all asm funcs with special ABIs)
 	WasmMove = sysvar("wasmMove")
 	WasmZero = sysvar("wasmZero")
@@ -248,7 +240,7 @@ func dvarint(x *obj.LSym, off int, v int64) int {
 // - Offset of where argument should be placed in the args frame when making call
 func (s *state) emitOpenDeferInfo() {
 	x := Ctxt.Lookup(s.curfn.Func.lsym.Name + ".opendefer")
-	s.curfn.Func.lsym.Func.OpenCodedDeferInfo = x
+	s.curfn.Func.lsym.Func().OpenCodedDeferInfo = x
 	off := 0

 	// Compute maxargsize (max size of arguments for all defers)
@@ -347,7 +339,13 @@ func buildssa(fn *Node, worker int) *ssa.Func {
 	s.f.Entry.Pos = fn.Pos

 	if printssa {
-		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDumpFile, s.f, ssaDumpCFG)
+		ssaDF := ssaDumpFile
+		if ssaDir != "" {
+			ssaDF = filepath.Join(ssaDir, myimportpath+"."+name+".html")
+			ssaD := filepath.Dir(ssaDF)
+			os.MkdirAll(ssaD, 0755)
+		}
+		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
 		// TODO: generate and print a mapping from nodes to values and blocks
 		dumpSourcesColumn(s.f.HTMLWriter, fn)
 		s.f.HTMLWriter.WriteAST("AST", astBuf)
@@ -1273,7 +1271,7 @@ func (s *state) stmt(n *Node) {
 			// We're assigning a slicing operation back to its source.
 			// Don't write back fields we aren't changing. See issue #14855.
 			i, j, k := rhs.SliceBounds()
-			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int64() == 0) {
+			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int64Val() == 0) {
 				// [0:...] is the same as [:...]
 				i = nil
 			}
@@ -1303,7 +1301,7 @@ func (s *state) stmt(n *Node) {
 	case OIF:
 		if Isconst(n.Left, CTBOOL) {
 			s.stmtList(n.Left.Ninit)
-			if n.Left.Bool() {
+			if n.Left.BoolVal() {
 				s.stmtList(n.Nbody)
 			} else {
 				s.stmtList(n.Rlist)
@@ -2557,22 +2555,22 @@ func (s *state) expr(n *Node) *ssa.Value {
 		return s.addr(n.Left)

 	case ORESULT:
-		if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall {
+		if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
 			// Do the old thing
 			addr := s.constOffPtrSP(types.NewPtr(n.Type), n.Xoffset)
-			return s.load(n.Type, addr)
+			return s.rawLoad(n.Type, addr)
 		}
 		which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Xoffset)
 		if which == -1 {
 			// Do the old thing // TODO: Panic instead.
 			addr := s.constOffPtrSP(types.NewPtr(n.Type), n.Xoffset)
-			return s.load(n.Type, addr)
+			return s.rawLoad(n.Type, addr)
 		}
 		if canSSAType(n.Type) {
 			return s.newValue1I(ssa.OpSelectN, n.Type, which, s.prevCall)
 		} else {
 			addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(n.Type), which, s.prevCall)
-			return s.load(n.Type, addr)
+			return s.rawLoad(n.Type, addr)
 		}

 	case ODEREF:
@@ -2612,7 +2610,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 			// Replace "abc"[1] with 'b'.
 			// Delayed until now because "abc"[1] is not an ideal constant.
 			// See test/fixedbugs/issue11370.go.
-			return s.newValue0I(ssa.OpConst8, types.Types[TUINT8], int64(int8(strlit(n.Left)[n.Right.Int64()])))
+			return s.newValue0I(ssa.OpConst8, types.Types[TUINT8], int64(int8(n.Left.StringVal()[n.Right.Int64Val()])))
 		}
 		a := s.expr(n.Left)
 		i := s.expr(n.Right)
@@ -2621,7 +2619,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 		ptrtyp := s.f.Config.Types.BytePtr
 		ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
 		if Isconst(n.Right, CTINT) {
-			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int64(), ptr)
+			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int64Val(), ptr)
 		} else {
 			ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
 		}
@@ -4022,11 +4020,6 @@ func init() {
 			return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[TUINT64], types.Types[TUINT64]), args[0], args[1])
 		},
 		sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64LE, sys.ArchPPC64, sys.ArchS390X)
-	add("math/big", "divWW",
-		func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
-			return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[TUINT64], types.Types[TUINT64]), args[0], args[1], args[2])
-		},
-		sys.ArchAMD64)
 }

 // findIntrinsic returns a function which builds the SSA equivalent of the
@@ -4256,6 +4249,7 @@ func (s *state) openDeferExit() {
 	s.lastDeferExit = deferExit
 	s.lastDeferCount = len(s.openDefers)
 	zeroval := s.constInt8(types.Types[TUINT8], 0)
+	testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f)
 	// Test for and run defers in reverse order
 	for i := len(s.openDefers) - 1; i >= 0; i-- {
 		r := s.openDefers[i]
@@ -4293,23 +4287,38 @@ func (s *state) openDeferExit() {
 		stksize := fn.Type.ArgWidth()
 		var ACArgs []ssa.Param
 		var ACResults []ssa.Param
+		var callArgs []*ssa.Value
 		if r.rcvr != nil {
 			// rcvr in case of OCALLINTER
 			v := s.load(r.rcvr.Type.Elem(), r.rcvr)
-			addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart)
 			ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(argStart)})
-			s.store(types.Types[TUINTPTR], addr, v)
+			if testLateExpansion {
+				callArgs = append(callArgs, v)
+			} else {
+				addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart)
+				s.store(types.Types[TUINTPTR], addr, v)
+			}
 		}
 		for j, argAddrVal := range r.argVals {
 			f := getParam(r.n, j)
 			pt := types.NewPtr(f.Type)
-			addr := s.constOffPtrSP(pt, argStart+f.Offset)
-			ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(argStart + f.Offset)})
-			if !canSSAType(f.Type) {
-				s.move(f.Type, addr, argAddrVal)
+			ACArgs = append(ACArgs, ssa.Param{Type: f.Type, Offset: int32(argStart + f.Offset)})
+			if testLateExpansion {
+				var a *ssa.Value
+				if !canSSAType(f.Type) {
+					a = s.newValue2(ssa.OpDereference, f.Type, argAddrVal, s.mem())
+				} else {
+					a = s.load(f.Type, argAddrVal)
+				}
+				callArgs = append(callArgs, a)
 			} else {
-				argVal := s.load(f.Type, argAddrVal)
-				s.storeType(f.Type, addr, argVal, 0, false)
+				addr := s.constOffPtrSP(pt, argStart+f.Offset)
+				if !canSSAType(f.Type) {
+					s.move(f.Type, addr, argAddrVal)
+				} else {
+					argVal := s.load(f.Type, argAddrVal)
+					s.storeType(f.Type, addr, argVal, 0, false)
+				}
 			}
 		}
 		var call *ssa.Value
@@ -4317,13 +4326,31 @@ func (s *state) openDeferExit() {
 			v := s.load(r.closure.Type.Elem(), r.closure)
 			s.maybeNilCheckClosure(v, callDefer)
 			codeptr := s.rawLoad(types.Types[TUINTPTR], v)
-			call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, ssa.ClosureAuxCall(ACArgs, ACResults), codeptr, v, s.mem())
+			aux := ssa.ClosureAuxCall(ACArgs, ACResults)
+			if testLateExpansion {
+				callArgs = append(callArgs, s.mem())
+				call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, aux, codeptr, v, s.mem())
+			}
 		} else {
-			// Do a static call if the original call was a static function or method
-			call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(fn.Sym.Linksym(), ACArgs, ACResults), s.mem())
+			aux := ssa.StaticAuxCall(fn.Sym.Linksym(), ACArgs, ACResults)
+			if testLateExpansion {
+				callArgs = append(callArgs, s.mem())
+				call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+				call.AddArgs(callArgs...)
+			} else {
+				// Do a static call if the original call was a static function or method
+				call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
+			}
 		}
 		call.AuxInt = stksize
-		s.vars[&memVar] = call
+		if testLateExpansion {
+			s.vars[&memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
+		} else {
+			s.vars[&memVar] = call
+		}
 		// Make sure that the stack slots with pointers are kept live
 		// through the call (which is a pre-emption point). Also, we will
 		// use the first call of the last defer exit to compute liveness
@@ -4380,11 +4407,9 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {

 	switch n.Op {
 	case OCALLFUNC:
+		testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
 		if k == callNormal && fn.Op == ONAME && fn.Class() == PFUNC {
 			sym = fn.Sym
-			if !returnResultAddr && strings.Contains(sym.Name, "testLateExpansion") {
-				testLateExpansion = true
-			}
 			break
 		}
 		closure = s.expr(fn)
@@ -4397,11 +4422,9 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		if fn.Op != ODOTMETH {
 			s.Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
 		}
+		testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
 		if k == callNormal {
 			sym = fn.Sym
-			if !returnResultAddr && strings.Contains(sym.Name, "testLateExpansion") {
-				testLateExpansion = true
-			}
 			break
 		}
 		closure = s.getMethodClosure(fn)
@@ -4411,6 +4434,7 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		if fn.Op != ODOTINTER {
 			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op)
 		}
+		testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
 		var iclosure *ssa.Value
 		iclosure, rcvr = s.getClosureAndRcvr(fn)
 		if k == callNormal {
@@ -4429,6 +4453,7 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {

 	var call *ssa.Value
 	if k == callDeferStack {
+		testLateExpansion = ssa.LateCallExpansionEnabledWithin(s.f)
 		// Make a defer struct d on the stack.
 		t := deferstruct(stksize)
 		d := tempAt(n.Pos, s.curfn, t)
@@ -4479,10 +4504,17 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		}

 		// Call runtime.deferprocStack with pointer to _defer record.
-		arg0 := s.constOffPtrSP(types.Types[TUINTPTR], Ctxt.FixedFrameSize())
-		s.store(types.Types[TUINTPTR], arg0, addr)
 		ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(Ctxt.FixedFrameSize())})
-		call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults), s.mem())
+		aux := ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults)
+		if testLateExpansion {
+			callArgs = append(callArgs, addr, s.mem())
+			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+			call.AddArgs(callArgs...)
+		} else {
+			arg0 := s.constOffPtrSP(types.Types[TUINTPTR], Ctxt.FixedFrameSize())
+			s.store(types.Types[TUINTPTR], arg0, addr)
+			call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
+		}
 		if stksize < int64(Widthptr) {
 			// We need room for both the call to deferprocStack and the call to
 			// the deferred function.
@@ -4549,9 +4581,21 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		// call target
 		switch {
 		case k == callDefer:
-			call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(deferproc, ACArgs, ACResults), s.mem())
+			aux := ssa.StaticAuxCall(deferproc, ACArgs, ACResults)
+			if testLateExpansion {
+				call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
+			}
 		case k == callGo:
-			call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(newproc, ACArgs, ACResults), s.mem())
+			aux := ssa.StaticAuxCall(newproc, ACArgs, ACResults)
+			if testLateExpansion {
+				call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
+			}
 		case closure != nil:
 			// rawLoad because loading the code pointer from a
 			// closure is always safe, but IsSanitizerSafeAddr
@@ -4559,18 +4603,25 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 			// critical that we not clobber any arguments already
 			// stored onto the stack.
 			codeptr = s.rawLoad(types.Types[TUINTPTR], closure)
-			call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, ssa.ClosureAuxCall(ACArgs, ACResults), codeptr, closure, s.mem())
+			if testLateExpansion {
+				aux := ssa.ClosureAuxCall(ACArgs, ACResults)
+				call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, ssa.ClosureAuxCall(ACArgs, ACResults), codeptr, closure, s.mem())
+			}
 		case codeptr != nil:
-			call = s.newValue2A(ssa.OpInterCall, types.TypeMem, ssa.InterfaceAuxCall(ACArgs, ACResults), codeptr, s.mem())
+			if testLateExpansion {
+				aux := ssa.InterfaceAuxCall(ACArgs, ACResults)
+				call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue2A(ssa.OpInterCall, types.TypeMem, ssa.InterfaceAuxCall(ACArgs, ACResults), codeptr, s.mem())
+			}
 		case sym != nil:
 			if testLateExpansion {
-				var tys []*types.Type
 				aux := ssa.StaticAuxCall(sym.Linksym(), ACArgs, ACResults)
-				for i := int64(0); i < aux.NResults(); i++ {
-					tys = append(tys, aux.TypeOfResult(i))
-				}
-				tys = append(tys, types.TypeMem)
-				call = s.newValue0A(ssa.OpStaticLECall, types.NewResults(tys), aux)
+				call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
 				call.AddArgs(callArgs...)
 			} else {
 				call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(sym.Linksym(), ACArgs, ACResults), s.mem())
@@ -4611,7 +4662,11 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 	}
 	fp := res.Field(0)
 	if returnResultAddr {
-		return s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+Ctxt.FixedFrameSize())
+		pt := types.NewPtr(fp.Type)
+		if testLateExpansion {
+			return s.newValue1I(ssa.OpSelectNAddr, pt, 0, call)
+		}
+		return s.constOffPtrSP(pt, fp.Offset+Ctxt.FixedFrameSize())
 	}

 	if testLateExpansion {
@@ -4715,7 +4770,7 @@ func (s *state) addr(n *Node) *ssa.Value {
 		}
 	case ORESULT:
 		// load return from callee
-		if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall {
+		if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
 			return s.constOffPtrSP(t, n.Xoffset)
 		}
 		which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Xoffset)
@@ -5018,15 +5073,22 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args .
 	s.prevCall = nil
 	// Write args to the stack
 	off := Ctxt.FixedFrameSize()
+	testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f)
 	var ACArgs []ssa.Param
 	var ACResults []ssa.Param
+	var callArgs []*ssa.Value

 	for _, arg := range args {
 		t := arg.Type
 		off = Rnd(off, t.Alignment())
-		ptr := s.constOffPtrSP(t.PtrTo(), off)
 		size := t.Size()
 		ACArgs = append(ACArgs, ssa.Param{Type: t, Offset: int32(off)})
-		s.store(t, ptr, arg)
+		if testLateExpansion {
+			callArgs = append(callArgs, arg)
+		} else {
+			ptr := s.constOffPtrSP(t.PtrTo(), off)
+			s.store(t, ptr, arg)
+		}
 		off += size
 	}
 	off = Rnd(off, int64(Widthreg))
@@ -5040,8 +5102,17 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args .
 	}

 	// Issue call
-	call := s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(fn, ACArgs, ACResults), s.mem())
-	s.vars[&memVar] = call
+	var call *ssa.Value
+	aux := ssa.StaticAuxCall(fn, ACArgs, ACResults)
+	if testLateExpansion {
+		callArgs = append(callArgs, s.mem())
+		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+		call.AddArgs(callArgs...)
+		s.vars[&memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
+	} else {
+		call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
+		s.vars[&memVar] = call
+	}

 	if !returns {
 		// Finish block
@@ -5057,11 +5128,24 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args .

 	// Load results
 	res := make([]*ssa.Value, len(results))
-	for i, t := range results {
-		off = Rnd(off, t.Alignment())
-		ptr := s.constOffPtrSP(types.NewPtr(t), off)
-		res[i] = s.load(t, ptr)
-		off += t.Size()
+	if testLateExpansion {
+		for i, t := range results {
+			off = Rnd(off, t.Alignment())
+			if canSSAType(t) {
+				res[i] = s.newValue1I(ssa.OpSelectN, t, int64(i), call)
+			} else {
+				addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), int64(i), call)
+				res[i] = s.rawLoad(t, addr)
+			}
+			off += t.Size()
+		}
+	} else {
+		for i, t := range results {
+			off = Rnd(off, t.Alignment())
+			ptr := s.constOffPtrSP(types.NewPtr(t), off)
+			res[i] = s.load(t, ptr)
+			off += t.Size()
+		}
 	}
 	off = Rnd(off, int64(Widthptr))

@@ -5918,9 +6002,7 @@ type SSAGenState struct {
 	// bstart remembers where each block starts (indexed by block ID)
 	bstart []*obj.Prog

-	// 387 port: maps from SSE registers (REG_X?) to 387 registers (REG_F?)
-	SSEto387 map[int16]int16
-	// Some architectures require a 64-bit temporary for FP-related register shuffling. Examples include x86-387, PPC, and Sparc V8.
+	// Some architectures require a 64-bit temporary for FP-related register shuffling. Examples include PPC and Sparc V8.
 	ScratchFpMem *Node

 	maxarg int64 // largest frame size for arguments to calls made by the function
@@ -6026,7 +6108,7 @@ func emitStackObjects(e *ssafn, pp *Progs) {

 	// Populate the stack object data.
 	// Format must match runtime/stack.go:stackObjectRecord.
-	x := e.curfn.Func.lsym.Func.StackObjects
+	x := e.curfn.Func.lsym.Func().StackObjects
 	off := 0
 	off = duintptr(x, off, uint64(len(vars)))
 	for _, v := range vars {
@@ -6063,7 +6145,7 @@ func genssa(f *ssa.Func, pp *Progs) {
 	s.livenessMap = liveness(e, f, pp)
 	emitStackObjects(e, pp)

-	openDeferInfo := e.curfn.Func.lsym.Func.OpenCodedDeferInfo
+	openDeferInfo := e.curfn.Func.lsym.Func().OpenCodedDeferInfo
 	if openDeferInfo != nil {
 		// This function uses open-coded defers -- write out the funcdata
 		// info that we computed at the end of genssa.
@@ -6087,10 +6169,6 @@ func genssa(f *ssa.Func, pp *Progs) {
 		progToBlock[s.pp.next] = f.Blocks[0]
 	}

-	if thearch.Use387 {
-		s.SSEto387 = map[int16]int16{}
-	}
-
 	s.ScratchFpMem = e.scratchFpMem

 	if Ctxt.Flag_locationlists {
@@ -6272,7 +6350,7 @@ func genssa(f *ssa.Func, pp *Progs) {
 					// some of the inline marks.
 					// Use this instruction instead.
 					p.Pos = p.Pos.WithIsStmt() // promote position to a statement
-					pp.curfn.Func.lsym.Func.AddInlMark(p, inlMarks[m])
+					pp.curfn.Func.lsym.Func().AddInlMark(p, inlMarks[m])
 					// Make the inline mark a real nop, so it doesn't generate any code.
 					m.As = obj.ANOP
 					m.Pos = src.NoXPos
@@ -6284,7 +6362,7 @@ func genssa(f *ssa.Func, pp *Progs) {
 		// Any unmatched inline marks now need to be added to the inlining tree (and will generate a nop instruction).
 		for _, p := range inlMarkList {
 			if p.As != obj.ANOP {
-				pp.curfn.Func.lsym.Func.AddInlMark(p, inlMarks[p])
+				pp.curfn.Func.lsym.Func().AddInlMark(p, inlMarks[p])
 			}
 		}
 	}
@@ -546,22 +546,19 @@ func methtype(t *types.Type) *types.Type {

 // Is type src assignment compatible to type dst?
 // If so, return op code to use in conversion.
-// If not, return OXXX.
-func assignop(src, dst *types.Type, why *string) Op {
-	if why != nil {
-		*why = ""
-	}
-
+// If not, return OXXX. In this case, the string return parameter may
+// hold a reason why. In all other cases, it'll be the empty string.
+func assignop(src, dst *types.Type) (Op, string) {
 	if src == dst {
-		return OCONVNOP
+		return OCONVNOP, ""
 	}
 	if src == nil || dst == nil || src.Etype == TFORW || dst.Etype == TFORW || src.Orig == nil || dst.Orig == nil {
-		return OXXX
+		return OXXX, ""
 	}

 	// 1. src type is identical to dst.
 	if types.Identical(src, dst) {
-		return OCONVNOP
+		return OCONVNOP, ""
 	}

 	// 2. src and dst have identical underlying types
@@ -575,13 +572,13 @@ func assignop(src, dst *types.Type, why *string) Op {
 		if src.IsEmptyInterface() {
 			// Conversion between two empty interfaces
 			// requires no code.
-			return OCONVNOP
+			return OCONVNOP, ""
 		}
 		if (src.Sym == nil || dst.Sym == nil) && !src.IsInterface() {
 			// Conversion between two types, at least one unnamed,
 			// needs no conversion. The exception is nonempty interfaces
 			// which need to have their itab updated.
-			return OCONVNOP
+			return OCONVNOP, ""
 		}
 	}

@@ -590,49 +587,47 @@ func assignop(src, dst *types.Type, why *string) Op {
 		var missing, have *types.Field
 		var ptr int
 		if implements(src, dst, &missing, &have, &ptr) {
-			return OCONVIFACE
+			return OCONVIFACE, ""
 		}

 		// we'll have complained about this method anyway, suppress spurious messages.
 		if have != nil && have.Sym == missing.Sym && (have.Type.Broke() || missing.Type.Broke()) {
-			return OCONVIFACE
+			return OCONVIFACE, ""
 		}

-		if why != nil {
-			if isptrto(src, TINTER) {
-				*why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", src)
-			} else if have != nil && have.Sym == missing.Sym && have.Nointerface() {
-				*why = fmt.Sprintf(":\n\t%v does not implement %v (%v method is marked 'nointerface')", src, dst, missing.Sym)
-			} else if have != nil && have.Sym == missing.Sym {
-				*why = fmt.Sprintf(":\n\t%v does not implement %v (wrong type for %v method)\n"+
-					"\t\thave %v%0S\n\t\twant %v%0S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
-			} else if ptr != 0 {
-				*why = fmt.Sprintf(":\n\t%v does not implement %v (%v method has pointer receiver)", src, dst, missing.Sym)
-			} else if have != nil {
-				*why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)\n"+
-					"\t\thave %v%0S\n\t\twant %v%0S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
-			} else {
-				*why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)", src, dst, missing.Sym)
-			}
+		var why string
+		if isptrto(src, TINTER) {
+			why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", src)
+		} else if have != nil && have.Sym == missing.Sym && have.Nointerface() {
+			why = fmt.Sprintf(":\n\t%v does not implement %v (%v method is marked 'nointerface')", src, dst, missing.Sym)
+		} else if have != nil && have.Sym == missing.Sym {
+			why = fmt.Sprintf(":\n\t%v does not implement %v (wrong type for %v method)\n"+
+				"\t\thave %v%0S\n\t\twant %v%0S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+		} else if ptr != 0 {
+			why = fmt.Sprintf(":\n\t%v does not implement %v (%v method has pointer receiver)", src, dst, missing.Sym)
+		} else if have != nil {
+			why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)\n"+
+				"\t\thave %v%0S\n\t\twant %v%0S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+		} else {
+			why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)", src, dst, missing.Sym)
 		}

-		return OXXX
+		return OXXX, why
 	}

 	if isptrto(dst, TINTER) {
-		if why != nil {
-			*why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", dst)
-		}
-		return OXXX
+		why := fmt.Sprintf(":\n\t%v is pointer to interface, not interface", dst)
+		return OXXX, why
 	}

 	if src.IsInterface() && dst.Etype != TBLANK {
 		var missing, have *types.Field
 		var ptr int
-		if why != nil && implements(dst, src, &missing, &have, &ptr) {
-			*why = ": need type assertion"
+		var why string
+		if implements(dst, src, &missing, &have, &ptr) {
+			why = ": need type assertion"
 		}
-		return OXXX
+		return OXXX, why
 	}

 	// 4. src is a bidirectional channel value, dst is a channel type,
@ -640,7 +635,7 @@ func assignop(src, dst *types.Type, why *string) Op {
|
|||
// either src or dst is not a named type.
|
||||
if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
|
||||
if types.Identical(src.Elem(), dst.Elem()) && (src.Sym == nil || dst.Sym == nil) {
|
||||
return OCONVNOP
|
||||
return OCONVNOP, ""
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -653,7 +648,7 @@ func assignop(src, dst *types.Type, why *string) Op {
|
|||
TCHAN,
|
||||
TINTER,
|
||||
TSLICE:
|
||||
return OCONVNOP
|
||||
return OCONVNOP, ""
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -661,26 +656,23 @@ func assignop(src, dst *types.Type, why *string) Op {
|
|||
|
||||
// 7. Any typed value can be assigned to the blank identifier.
|
||||
if dst.Etype == TBLANK {
|
||||
return OCONVNOP
|
||||
return OCONVNOP, ""
|
||||
}
|
||||
|
||||
return OXXX
|
||||
return OXXX, ""
|
||||
}
|
||||
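The hunks above change assignop to return its failure reason as a second result instead of writing it through a *string out-parameter, which removes the nil checks and the "clear the error" resets at every early return. A minimal runnable sketch of the calling-convention difference, using stand-in types rather than the compiler's real Op and *types.Type:

package main

import "fmt"

// Op and its constants stand in for the compiler's types in this sketch.
type Op int

const (
	OXXX Op = iota
	OCONVNOP
)

// assignopOld mimics the previous API: the reason is written through a
// pointer, and callers that do not care pass nil.
func assignopOld(src, dst string, why *string) Op {
	if src != dst {
		if why != nil {
			*why = ": types differ"
		}
		return OXXX
	}
	return OCONVNOP
}

// assignopNew mimics the new API: the reason is an ordinary second result,
// so there is no nil check and no out-parameter to reset.
func assignopNew(src, dst string) (Op, string) {
	if src != dst {
		return OXXX, ": types differ"
	}
	return OCONVNOP, ""
}

func main() {
	var why string
	if assignopOld("chan", "slice", &why) == OXXX {
		fmt.Println("old style" + why)
	}
	if op, why := assignopNew("chan", "slice"); op == OXXX {
		fmt.Println("new style" + why)
	}
}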
// Can we convert a value of type src to a value of type dst?
// If so, return op code to use in conversion (maybe OCONVNOP).
// If not, return OXXX.
// If not, return OXXX. In this case, the string return parameter may
// hold a reason why. In all other cases, it'll be the empty string.
// srcConstant indicates whether the value of type src is a constant.
func convertop(srcConstant bool, src, dst *types.Type, why *string) Op {
if why != nil {
*why = ""
}

func convertop(srcConstant bool, src, dst *types.Type) (Op, string) {
if src == dst {
return OCONVNOP
return OCONVNOP, ""
}
if src == nil || dst == nil {
return OXXX
return OXXX, ""
}

// Conversions from regular to go:notinheap are not allowed

@ -688,23 +680,19 @@ func convertop(srcConstant bool, src, dst *types.Type, why *string) Op {
// rules.
// (a) Disallow (*T) to (*U) where T is go:notinheap but U isn't.
if src.IsPtr() && dst.IsPtr() && dst.Elem().NotInHeap() && !src.Elem().NotInHeap() {
if why != nil {
*why = fmt.Sprintf(":\n\t%v is incomplete (or unallocatable), but %v is not", dst.Elem(), src.Elem())
}
return OXXX
why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable), but %v is not", dst.Elem(), src.Elem())
return OXXX, why
}
// (b) Disallow string to []T where T is go:notinheap.
if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Etype == types.Bytetype.Etype || dst.Elem().Etype == types.Runetype.Etype) {
if why != nil {
*why = fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
}
return OXXX
why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
return OXXX, why
}

// 1. src can be assigned to dst.
op := assignop(src, dst, why)
op, why := assignop(src, dst)
if op != OXXX {
return op
return op, why
}

// The rules for interfaces are no different in conversions

@ -712,60 +700,57 @@ func convertop(srcConstant bool, src, dst *types.Type, why *string) Op {
// with the good message from assignop.
// Otherwise clear the error.
if src.IsInterface() || dst.IsInterface() {
return OXXX
}
if why != nil {
*why = ""
return OXXX, why
}

// 2. Ignoring struct tags, src and dst have identical underlying types.
if types.IdenticalIgnoreTags(src.Orig, dst.Orig) {
return OCONVNOP
return OCONVNOP, ""
}

// 3. src and dst are unnamed pointer types and, ignoring struct tags,
// their base types have identical underlying types.
if src.IsPtr() && dst.IsPtr() && src.Sym == nil && dst.Sym == nil {
if types.IdenticalIgnoreTags(src.Elem().Orig, dst.Elem().Orig) {
return OCONVNOP
return OCONVNOP, ""
}
}

// 4. src and dst are both integer or floating point types.
if (src.IsInteger() || src.IsFloat()) && (dst.IsInteger() || dst.IsFloat()) {
if simtype[src.Etype] == simtype[dst.Etype] {
return OCONVNOP
return OCONVNOP, ""
}
return OCONV
return OCONV, ""
}

// 5. src and dst are both complex types.
if src.IsComplex() && dst.IsComplex() {
if simtype[src.Etype] == simtype[dst.Etype] {
return OCONVNOP
return OCONVNOP, ""
}
return OCONV
return OCONV, ""
}

// Special case for constant conversions: any numeric
// conversion is potentially okay. We'll validate further
// within evconst. See #38117.
if srcConstant && (src.IsInteger() || src.IsFloat() || src.IsComplex()) && (dst.IsInteger() || dst.IsFloat() || dst.IsComplex()) {
return OCONV
return OCONV, ""
}

// 6. src is an integer or has type []byte or []rune
// and dst is a string type.
if src.IsInteger() && dst.IsString() {
return ORUNESTR
return ORUNESTR, ""
}

if src.IsSlice() && dst.IsString() {
if src.Elem().Etype == types.Bytetype.Etype {
return OBYTES2STR
return OBYTES2STR, ""
}
if src.Elem().Etype == types.Runetype.Etype {
return ORUNES2STR
return ORUNES2STR, ""
}
}

@ -773,21 +758,21 @@ func convertop(srcConstant bool, src, dst *types.Type, why *string) Op {
// String to slice.
if src.IsString() && dst.IsSlice() {
if dst.Elem().Etype == types.Bytetype.Etype {
return OSTR2BYTES
return OSTR2BYTES, ""
}
if dst.Elem().Etype == types.Runetype.Etype {
return OSTR2RUNES
return OSTR2RUNES, ""
}
}

// 8. src is a pointer or uintptr and dst is unsafe.Pointer.
if (src.IsPtr() || src.IsUintptr()) && dst.IsUnsafePtr() {
return OCONVNOP
return OCONVNOP, ""
}

// 9. src is unsafe.Pointer and dst is a pointer or uintptr.
if src.IsUnsafePtr() && (dst.IsPtr() || dst.IsUintptr()) {
return OCONVNOP
return OCONVNOP, ""
}

// src is map and dst is a pointer to corresponding hmap.

@ -795,10 +780,10 @@ func convertop(srcConstant bool, src, dst *types.Type, why *string) Op {
// go gc maps are implemented as a pointer to a hmap struct.
if src.Etype == TMAP && dst.IsPtr() &&
src.MapType().Hmap == dst.Elem() {
return OCONVNOP
return OCONVNOP, ""
}

return OXXX
return OXXX, ""
}

func assignconv(n *Node, t *types.Type, context string) *Node {

@ -825,7 +810,7 @@ func assignconvfn(n *Node, t *types.Type, context func() string) *Node {

// Convert ideal bool from comparison to plain bool
// if the next step is non-bool (like interface{}).
if n.Type == types.Idealbool && !t.IsBoolean() {
if n.Type == types.UntypedBool && !t.IsBoolean() {
if n.Op == ONAME || n.Op == OLITERAL {
r := nod(OCONVNOP, n, nil)
r.Type = types.Types[TBOOL]

@ -839,8 +824,7 @@ func assignconvfn(n *Node, t *types.Type, context func() string) *Node {
return n
}

var why string
op := assignop(n.Type, t, &why)
op, why := assignop(n.Type, t)
if op == OXXX {
yyerror("cannot use %L as type %v in %s%s", n, t, context(), why)
op = OCONV

@ -1040,25 +1024,24 @@ func calcHasCall(n *Node) bool {
return false
}

func badtype(op Op, tl *types.Type, tr *types.Type) {
fmt_ := ""
func badtype(op Op, tl, tr *types.Type) {
var s string
if tl != nil {
fmt_ += fmt.Sprintf("\n\t%v", tl)
s += fmt.Sprintf("\n\t%v", tl)
}
if tr != nil {
fmt_ += fmt.Sprintf("\n\t%v", tr)
s += fmt.Sprintf("\n\t%v", tr)
}

// common mistake: *struct and *interface.
if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
fmt_ += "\n\t(*struct vs *interface)"
s += "\n\t(*struct vs *interface)"
} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
fmt_ += "\n\t(*interface vs *struct)"
s += "\n\t(*interface vs *struct)"
}
}

s := fmt_
yyerror("illegal types for operand: %v%s", op, s)
}

@ -1921,3 +1904,13 @@ func ifaceData(pos src.XPos, n *Node, t *types.Type) *Node {
ind.SetBounded(true)
return ind
}

// typePos returns the position associated with t.
// This is where t was declared or where it appeared as a type expression.
func typePos(t *types.Type) src.XPos {
n := asNode(t.Nod)
if n == nil || !n.Pos.IsKnown() {
Fatalf("bad type: %v", t)
}
return n.Pos
}
@ -189,16 +189,19 @@ func typecheckExprSwitch(n *Node) {
continue
}

switch {
case nilonly != "" && !n1.isNil():
if nilonly != "" && !n1.isNil() {
yyerrorl(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
case t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type):
} else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) {
yyerrorl(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
case assignop(n1.Type, t, nil) == 0 && assignop(t, n1.Type, nil) == 0:
if n.Left != nil {
yyerrorl(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
} else {
yyerrorl(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
} else {
op1, _ := assignop(n1.Type, t)
op2, _ := assignop(t, n1.Type)
if op1 == OXXX && op2 == OXXX {
if n.Left != nil {
yyerrorl(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
} else {
yyerrorl(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
}
}
}
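The restructured check above reports a mismatch only when assignop fails in both directions. A small runnable example of a switch this rule accepts — a concrete, comparable case type in a switch on an interface value, where one of the two assignop calls succeeds; a case such as case "x": here would fail both calls and trigger the mismatched-types error:

package main

import "fmt"

type stringer interface{ String() string }

type id int

func (i id) String() string { return fmt.Sprintf("id(%d)", i) }

func main() {
	var v stringer = id(1)
	switch v {
	case id(1): // legal: id is assignable to stringer
		fmt.Println("matched id(1)")
	}
}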
@ -358,8 +361,8 @@ func (s *exprSwitch) flush() {
// all we need here is consistency. We respect this
// sorting below.
sort.Slice(cc, func(i, j int) bool {
si := strlit(cc[i].lo)
sj := strlit(cc[j].lo)
si := cc[i].lo.StringVal()
sj := cc[j].lo.StringVal()
if len(si) != len(sj) {
return len(si) < len(sj)
}

@ -368,7 +371,7 @@ func (s *exprSwitch) flush() {

// runLen returns the string length associated with a
// particular run of exprClauses.
runLen := func(run []exprClause) int64 { return int64(len(strlit(run[0].lo))) }
runLen := func(run []exprClause) int64 { return int64(len(run[0].lo.StringVal())) }

// Collapse runs of consecutive strings with the same length.
var runs [][]exprClause

@ -405,7 +408,7 @@ func (s *exprSwitch) flush() {
merged := cc[:1]
for _, c := range cc[1:] {
last := &merged[len(merged)-1]
if last.jmp == c.jmp && last.hi.Int64()+1 == c.lo.Int64() {
if last.jmp == c.jmp && last.hi.Int64Val()+1 == c.lo.Int64Val() {
last.hi = c.lo
} else {
merged = append(merged, c)

@ -440,7 +443,7 @@ func (c *exprClause) test(exprname *Node) *Node {

// Optimize "switch true { ...}" and "switch false { ... }".
if Isconst(exprname, CTBOOL) && !c.lo.Type.IsInterface() {
if exprname.Val().U.(bool) {
if exprname.BoolVal() {
return c.lo
} else {
return nodl(c.pos, ONOT, c.lo, nil)
@ -361,7 +361,7 @@ func typecheck1(n *Node, top int) (res *Node) {
ok |= ctxExpr

if n.Type == nil && n.Val().Ctype() == CTSTR {
n.Type = types.Idealstring
n.Type = types.UntypedString
}

case ONONAME:

@ -623,8 +623,8 @@ func typecheck1(n *Node, top int) (res *Node) {
// no defaultlit for left
// the outer context gives the type
n.Type = l.Type
if (l.Type == types.Idealfloat || l.Type == types.Idealcomplex) && r.Op == OLITERAL {
n.Type = types.Idealint
if (l.Type == types.UntypedFloat || l.Type == types.UntypedComplex) && r.Op == OLITERAL {
n.Type = types.UntypedInt
}

break

@ -674,8 +674,8 @@ func typecheck1(n *Node, top int) (res *Node) {
// The conversion allocates, so only do it if the concrete type is huge.
converted := false
if r.Type.Etype != TBLANK {
aop = assignop(l.Type, r.Type, nil)
if aop != 0 {
aop, _ = assignop(l.Type, r.Type)
if aop != OXXX {
if r.Type.IsInterface() && !l.Type.IsInterface() && !IsComparable(l.Type) {
yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type))
n.Type = nil

@ -696,8 +696,8 @@ func typecheck1(n *Node, top int) (res *Node) {
}

if !converted && l.Type.Etype != TBLANK {
aop = assignop(r.Type, l.Type, nil)
if aop != 0 {
aop, _ = assignop(r.Type, l.Type)
if aop != OXXX {
if l.Type.IsInterface() && !r.Type.IsInterface() && !IsComparable(r.Type) {
yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type))
n.Type = nil

@ -777,7 +777,7 @@ func typecheck1(n *Node, top int) (res *Node) {

if iscmp[n.Op] {
evconst(n)
t = types.Idealbool
t = types.UntypedBool
if n.Op != OLITERAL {
l, r = defaultlit2(l, r, true)
n.Left = l

@ -1046,13 +1046,13 @@ func typecheck1(n *Node, top int) (res *Node) {
}

if !n.Bounded() && Isconst(n.Right, CTINT) {
x := n.Right.Int64()
x := n.Right.Int64Val()
if x < 0 {
yyerror("invalid %s index %v (index must be non-negative)", why, n.Right)
} else if t.IsArray() && x >= t.NumElem() {
yyerror("invalid array index %v (out of bounds for %d-element array)", n.Right, t.NumElem())
} else if Isconst(n.Left, CTSTR) && x >= int64(len(strlit(n.Left))) {
yyerror("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(strlit(n.Left)))
} else if Isconst(n.Left, CTSTR) && x >= int64(len(n.Left.StringVal())) {
yyerror("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(n.Left.StringVal()))
} else if n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
yyerror("invalid %s index %v (index too large)", why, n.Right)
}

@ -1148,11 +1148,11 @@ func typecheck1(n *Node, top int) (res *Node) {
l = defaultlit(l, types.Types[TINT])
c = defaultlit(c, types.Types[TINT])

if Isconst(l, CTINT) && l.Int64() < 0 {
if Isconst(l, CTINT) && l.Int64Val() < 0 {
Fatalf("len for OSLICEHEADER must be non-negative")
}

if Isconst(c, CTINT) && c.Int64() < 0 {
if Isconst(c, CTINT) && c.Int64Val() < 0 {
Fatalf("cap for OSLICEHEADER must be non-negative")
}

@ -1201,7 +1201,7 @@ func typecheck1(n *Node, top int) (res *Node) {
if n.Left.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
Fatalf("len for OMAKESLICECOPY too large")
}
if n.Left.Int64() < 0 {
if n.Left.Int64Val() < 0 {
Fatalf("len for OMAKESLICECOPY must be non-negative")
}
}

@ -1458,7 +1458,7 @@ func typecheck1(n *Node, top int) (res *Node) {
// Determine result type.
switch t.Etype {
case TIDEAL:
n.Type = types.Idealfloat
n.Type = types.UntypedFloat
case TCOMPLEX64:
n.Type = types.Types[TFLOAT32]
case TCOMPLEX128:

@ -1504,7 +1504,7 @@ func typecheck1(n *Node, top int) (res *Node) {
return n

case TIDEAL:
t = types.Idealcomplex
t = types.UntypedComplex

case TFLOAT32:
t = types.Types[TCOMPLEX64]

@ -1691,7 +1691,7 @@ func typecheck1(n *Node, top int) (res *Node) {
return n
}
var why string
n.Op = convertop(n.Left.Op == OLITERAL, t, n.Type, &why)
n.Op, why = convertop(n.Left.Op == OLITERAL, t, n.Type)
if n.Op == OXXX {
if !n.Diag() && !n.Type.Broke() && !n.Left.Diag() {
yyerror("cannot convert %L to type %v%s", n.Left, n.Type, why)

@ -1770,7 +1770,7 @@ func typecheck1(n *Node, top int) (res *Node) {
n.Type = nil
return n
}
if !checkmake(t, "len", l) || r != nil && !checkmake(t, "cap", r) {
if !checkmake(t, "len", &l) || r != nil && !checkmake(t, "cap", &r) {
n.Type = nil
return n
}

@ -1794,7 +1794,7 @@ func typecheck1(n *Node, top int) (res *Node) {
n.Type = nil
return n
}
if !checkmake(t, "size", l) {
if !checkmake(t, "size", &l) {
n.Type = nil
return n
}

@ -1815,7 +1815,7 @@ func typecheck1(n *Node, top int) (res *Node) {
n.Type = nil
return n
}
if !checkmake(t, "buffer", l) {
if !checkmake(t, "buffer", &l) {
n.Type = nil
return n
}

@ -2187,14 +2187,14 @@ func checksliceindex(l *Node, r *Node, tp *types.Type) bool {
}

if r.Op == OLITERAL {
if r.Int64() < 0 {
if r.Int64Val() < 0 {
yyerror("invalid slice index %v (index must be non-negative)", r)
return false
} else if tp != nil && tp.NumElem() >= 0 && r.Int64() > tp.NumElem() {
} else if tp != nil && tp.NumElem() >= 0 && r.Int64Val() > tp.NumElem() {
yyerror("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
return false
} else if Isconst(l, CTSTR) && r.Int64() > int64(len(strlit(l))) {
yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(strlit(l)))
} else if Isconst(l, CTSTR) && r.Int64Val() > int64(len(l.StringVal())) {
yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal()))
return false
} else if r.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
yyerror("invalid slice index %v (index too large)", r)

@ -2724,9 +2724,9 @@ func errorDetails(nl Nodes, tstruct *types.Type, isddd bool) string {
// e.g in error messages about wrong arguments to return.
func sigrepr(t *types.Type, isddd bool) string {
switch t {
case types.Idealstring:
case types.UntypedString:
return "string"
case types.Idealbool:
case types.UntypedBool:
return "bool"
}

@ -3267,9 +3267,7 @@ func typecheckas(n *Node) {
}

func checkassignto(src *types.Type, dst *Node) {
var why string

if assignop(src, dst.Type, &why) == 0 {
if op, why := assignop(src, dst.Type); op == OXXX {
yyerror("cannot assign %v to %L in multiple assignment%s", src, dst, why)
return
}

@ -3450,9 +3448,8 @@ func stringtoruneslit(n *Node) *Node {
}

var l []*Node
s := strlit(n.Left)
i := 0
for _, r := range s {
for _, r := range n.Left.StringVal() {
l = append(l, nod(OKEY, nodintconst(int64(i)), nodintconst(int64(r))))
i++
}

@ -3729,7 +3726,8 @@ ret:
n.SetWalkdef(1)
}

func checkmake(t *types.Type, arg string, n *Node) bool {
func checkmake(t *types.Type, arg string, np **Node) bool {
n := *np
if !n.Type.IsInteger() && n.Type.Etype != TIDEAL {
yyerror("non-integer %s argument in make(%v) - %v", arg, t, n.Type)
return false

@ -3739,12 +3737,12 @@ func checkmake(t *types.Type, arg string, n *Node) bool {
// to avoid redundant "constant NNN overflows int" errors.
switch consttype(n) {
case CTINT, CTRUNE, CTFLT, CTCPLX:
n.SetVal(toint(n.Val()))
if n.Val().U.(*Mpint).CmpInt64(0) < 0 {
v := toint(n.Val()).U.(*Mpint)
if v.CmpInt64(0) < 0 {
yyerror("negative %s argument in make(%v)", arg, t)
return false
}
if n.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
if v.Cmp(maxintval[TINT]) > 0 {
yyerror("%s argument too large in make(%v)", arg, t)
return false
}

@ -3756,6 +3754,7 @@ func checkmake(t *types.Type, arg string, n *Node) bool {
// for instance, indexlit might be called here and incorporate some
// of the bounds checks done for make.
n = defaultlit(n, types.Types[TINT])
*np = n

return true
}
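checkmake now takes **Node because defaultlit can return a replacement node that the caller's slot must observe. A minimal runnable sketch of that pointer-to-pointer pattern with a stand-in node type (the names here are illustrative, not the compiler's):

package main

import "fmt"

type node struct{ typ string }

// defaultlit returns a possibly different node, as the compiler's
// defaultlit does when it converts an untyped constant.
func defaultlit(n *node, typ string) *node {
	if n.typ == "untyped int" {
		return &node{typ: typ} // a new node, not an in-place update
	}
	return n
}

// checkmake mirrors the new signature: the converted node is written back
// through np so the caller sees the replacement.
func checkmake(arg string, np **node) bool {
	n := *np
	n = defaultlit(n, "int")
	*np = n
	return true
}

func main() {
	l := &node{typ: "untyped int"}
	checkmake("len", &l)
	fmt.Println(l.typ) // "int": the caller observes the converted node
}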
@ -3902,7 +3901,7 @@ func deadcodefn(fn *Node) {
return
}
case OFOR:
if !Isconst(n.Left, CTBOOL) || n.Left.Bool() {
if !Isconst(n.Left, CTBOOL) || n.Left.BoolVal() {
return
}
default:

@ -3932,7 +3931,7 @@ func deadcodeslice(nn Nodes) {
n.Left = deadcodeexpr(n.Left)
if Isconst(n.Left, CTBOOL) {
var body Nodes
if n.Left.Bool() {
if n.Left.BoolVal() {
n.Rlist = Nodes{}
body = n.Nbody
} else {

@ -3975,7 +3974,7 @@ func deadcodeexpr(n *Node) *Node {
n.Left = deadcodeexpr(n.Left)
n.Right = deadcodeexpr(n.Right)
if Isconst(n.Left, CTBOOL) {
if n.Left.Bool() {
if n.Left.BoolVal() {
return n.Right // true && x => x
} else {
return n.Left // false && x => false

@ -3985,7 +3984,7 @@ func deadcodeexpr(n *Node) *Node {
n.Left = deadcodeexpr(n.Left)
n.Right = deadcodeexpr(n.Right)
if Isconst(n.Left, CTBOOL) {
if n.Left.Bool() {
if n.Left.BoolVal() {
return n.Left // true || x => true
} else {
return n.Right // false || x => x

@ -123,21 +123,21 @@ func lexinit() {
asNode(s2.Def).SetSubOp(s.op)
}

types.Idealstring = types.New(TSTRING)
types.Idealbool = types.New(TBOOL)
types.UntypedString = types.New(TSTRING)
types.UntypedBool = types.New(TBOOL)
types.Types[TANY] = types.New(TANY)

s := builtinpkg.Lookup("true")
s.Def = asTypesNode(nodbool(true))
asNode(s.Def).Sym = lookup("true")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.Idealbool
asNode(s.Def).Type = types.UntypedBool

s = builtinpkg.Lookup("false")
s.Def = asTypesNode(nodbool(false))
asNode(s.Def).Sym = lookup("false")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.Idealbool
asNode(s.Def).Type = types.UntypedBool

s = lookup("_")
s.Block = -100

@ -351,7 +351,7 @@ func typeinit() {
sizeofString = Rnd(sliceLenOffset+int64(Widthptr), int64(Widthptr))

dowidth(types.Types[TSTRING])
dowidth(types.Idealstring)
dowidth(types.UntypedString)
}

func makeErrorInterface() *types.Type {

@ -336,19 +336,6 @@ func walkstmt(n *Node) *Node {
return n
}

func isSmallMakeSlice(n *Node) bool {
if n.Op != OMAKESLICE {
return false
}
r := n.Right
if r == nil {
r = n.Left
}
t := n.Type

return smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < maxImplicitStackVarSize/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types expressions are calculated.

@ -565,6 +552,7 @@ opswitch:
case OCALLINTER, OCALLFUNC, OCALLMETH:
if n.Op == OCALLINTER {
usemethod(n)
markUsedIfaceMethod(n)
}

if n.Op == OCALLFUNC && n.Left.Op == OCLOSURE {

@ -805,8 +793,8 @@ opswitch:
fromType := n.Left.Type
toType := n.Type

if !fromType.IsInterface() {
markTypeUsedInInterface(fromType)
if !fromType.IsInterface() && !Curfn.Func.Nname.isBlank() { // skip unnamed functions (func _())
markTypeUsedInInterface(fromType, Curfn.Func.lsym)
}

// typeword generates the type word of the interface value.

@ -1013,7 +1001,7 @@ opswitch:
// The SSA backend will handle those.
switch et {
case TINT64:
c := n.Right.Int64()
c := n.Right.Int64Val()
if c < 0 {
c = -c
}

@ -1021,7 +1009,7 @@ opswitch:
break opswitch
}
case TUINT64:
c := uint64(n.Right.Int64())
c := uint64(n.Right.Int64Val())
if c != 0 && c&(c-1) == 0 {
break opswitch
}

@ -1068,7 +1056,7 @@ opswitch:
yyerror("index out of bounds")
}
} else if Isconst(n.Left, CTSTR) {
n.SetBounded(bounded(r, int64(len(strlit(n.Left)))))
n.SetBounded(bounded(r, int64(len(n.Left.StringVal()))))
if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
Warn("index bounds check elided")
}

@ -1338,8 +1326,8 @@ opswitch:
yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
}
if n.Esc == EscNone {
if !isSmallMakeSlice(n) {
Fatalf("non-small OMAKESLICE with EscNone: %v", n)
if why := heapAllocReason(n); why != "" {
Fatalf("%v has EscNone, but %v", n, why)
}
// var arr [r]T
// n = arr[:l]

@ -1503,7 +1491,7 @@ opswitch:
case OSTR2BYTES:
s := n.Left
if Isconst(s, CTSTR) {
sc := strlit(s)
sc := s.StringVal()

// Allocate a [n]byte of the right size.
t := types.NewArray(types.Types[TUINT8], int64(len(sc)))

@ -1621,8 +1609,27 @@ opswitch:

// markTypeUsedInInterface marks that type t is converted to an interface.
// This information is used in the linker in dead method elimination.
func markTypeUsedInInterface(t *types.Type) {
typenamesym(t).Linksym().Set(obj.AttrUsedInIface, true)
func markTypeUsedInInterface(t *types.Type, from *obj.LSym) {
tsym := typenamesym(t).Linksym()
// Emit a marker relocation. The linker will know the type is converted
// to an interface if "from" is reachable.
r := obj.Addrel(from)
r.Sym = tsym
r.Type = objabi.R_USEIFACE
}

// markUsedIfaceMethod marks that an interface method is used in the current
// function. n is OCALLINTER node.
func markUsedIfaceMethod(n *Node) {
ityp := n.Left.Left.Type
tsym := typenamesym(ityp).Linksym()
r := obj.Addrel(Curfn.Func.lsym)
r.Sym = tsym
// n.Left.Xoffset is the method index * Widthptr (the offset of code pointer
// in itab).
midx := n.Left.Xoffset / int64(Widthptr)
r.Add = ifaceMethodOffset(ityp, midx)
r.Type = objabi.R_USEIFACEMETHOD
}
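The two helpers above record interface usage as marker relocations (R_USEIFACE, R_USEIFACEMETHOD) rather than an unconditional symbol attribute, so the linker can discard methods whose use sites are themselves unreachable. A runnable sketch of the two source patterns that produce the markers:

package main

import "fmt"

type greeter interface{ Greet() string }

type english struct{}

func (english) Greet() string { return "hello" }

func main() {
	// Converting a concrete type to an interface is what
	// markTypeUsedInInterface records: an R_USEIFACE relocation from
	// main's symbol to english's type descriptor.
	var g greeter = english{}
	// Calling through the interface is what markUsedIfaceMethod records:
	// an R_USEIFACEMETHOD relocation identifying Greet's itab slot, so
	// english.Greet stays live only if main is reachable.
	fmt.Println(g.Greet())
}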
// rtconvfn returns the parameter and result types that will be used by a

@ -1912,7 +1919,7 @@ func walkprint(nn *Node, init *Nodes) *Node {
for i := 0; i < len(s); {
var strs []string
for i < len(s) && Isconst(s[i], CTSTR) {
strs = append(strs, strlit(s[i]))
strs = append(strs, s[i].StringVal())
i++
}
if len(strs) > 0 {

@ -1981,7 +1988,7 @@ func walkprint(nn *Node, init *Nodes) *Node {
case TSTRING:
cs := ""
if Isconst(n, CTSTR) {
cs = strlit(n)
cs = n.StringVal()
}
switch cs {
case " ":

@ -2150,7 +2157,7 @@ func reorder3(all []*Node) []*Node {
// The result of reorder3save MUST be assigned back to n, e.g.
// n.Left = reorder3save(n.Left, all, i, early)
func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
if !aliased(n, all, i) {
if !aliased(n, all[:i]) {
return n
}

@ -2182,73 +2189,75 @@ func outervalue(n *Node) *Node {
}
}

// Is it possible that the computation of n might be
// affected by writes in as up to but not including the ith element?
func aliased(n *Node, all []*Node, i int) bool {
if n == nil {
// Is it possible that the computation of r might be
// affected by assignments in all?
func aliased(r *Node, all []*Node) bool {
if r == nil {
return false
}

// Treat all fields of a struct as referring to the whole struct.
// We could do better but we would have to keep track of the fields.
for n.Op == ODOT {
n = n.Left
for r.Op == ODOT {
r = r.Left
}

// Look for obvious aliasing: a variable being assigned
// during the all list and appearing in n.
// Also record whether there are any writes to main memory.
// Also record whether there are any writes to variables
// whose addresses have been taken.
// Also record whether there are any writes to addressable
// memory (either main memory or variables whose addresses
// have been taken).
memwrite := false
varwrite := false
for _, an := range all[:i] {
a := outervalue(an.Left)

for a.Op == ODOT {
a = a.Left
for _, as := range all {
// We can ignore assignments to blank.
if as.Left.isBlank() {
continue
}

if a.Op != ONAME {
l := outervalue(as.Left)
if l.Op != ONAME {
memwrite = true
continue
}

switch n.Class() {
switch l.Class() {
default:
varwrite = true
Fatalf("unexpected class: %v, %v", l, l.Class())

case PAUTOHEAP, PEXTERN:
memwrite = true
continue

case PAUTO, PPARAM, PPARAMOUT:
if n.Name.Addrtaken() {
varwrite = true
if l.Name.Addrtaken() {
memwrite = true
continue
}

if vmatch2(a, n) {
// Direct hit.
if vmatch2(l, r) {
// Direct hit: l appears in r.
return true
}
}
}

// The variables being written do not appear in n.
// However, n might refer to computed addresses
// The variables being written do not appear in r.
// However, r might refer to computed addresses
// that are being written.

// If no computed addresses are affected by the writes, no aliasing.
if !memwrite && !varwrite {
if !memwrite {
return false
}

// If n does not refer to computed addresses
// (that is, if n only refers to variables whose addresses
// If r does not refer to computed addresses
// (that is, if r only refers to variables whose addresses
// have not been taken), no aliasing.
if varexpr(n) {
if varexpr(r) {
return false
}

// Otherwise, both the writes and n refer to computed memory addresses.
// Otherwise, both the writes and r refer to computed memory addresses.
// Assume that they might conflict.
return true
}
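The rewritten aliased takes the assignment prefix directly (all[:i] at the call site) and folds the old varwrite case into memwrite. A runnable illustration of the two situations it distinguishes — a direct hit, where a written variable appears in a later right-hand side, and a computed-address write that varexpr can still prove harmless:

package main

import "fmt"

func main() {
	x, y := 1, 2
	// Direct hit: the right-hand side of the second assignment (x) aliases
	// the destination of the first, so the compiler must copy it to a
	// temporary before any writes happen (what reorder3save guards).
	x, y = y, x
	fmt.Println(x, y) // 2 1

	a := []int{10, 20}
	i := 0
	// The first assignment writes through a computed address (memwrite in
	// aliased's terms), but the second right-hand side, i+1, only mentions
	// variables whose addresses are never taken, so varexpr proves it
	// unaffected and no temporary is needed.
	a[i], i = 99, i+1
	fmt.Println(a, i) // [99 20] 1
}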
@ -2636,7 +2645,7 @@ func addstr(n *Node, init *Nodes) *Node {
sz := int64(0)
for _, n1 := range n.List.Slice() {
if n1.Op == OLITERAL {
sz += int64(len(strlit(n1)))
sz += int64(len(n1.StringVal()))
}
}

@ -3430,7 +3439,7 @@ func walkcompare(n *Node, init *Nodes) *Node {

func tracecmpArg(n *Node, t *types.Type, init *Nodes) *Node {
// Ugly hack to avoid "constant -1 overflows uintptr" errors, etc.
if n.Op == OLITERAL && n.Type.IsSigned() && n.Int64() < 0 {
if n.Op == OLITERAL && n.Type.IsSigned() && n.Int64Val() < 0 {
n = copyexpr(n, n.Type, init)
}

@ -3500,7 +3509,7 @@ func walkcompareString(n *Node, init *Nodes) *Node {
// Length-only checks are ok, though.
maxRewriteLen = 0
}
if s := strlit(cs); len(s) <= maxRewriteLen {
if s := cs.StringVal(); len(s) <= maxRewriteLen {
if len(s) > 0 {
ncs = safeexpr(ncs, init)
}

@ -3595,7 +3604,7 @@ func bounded(n *Node, max int64) bool {
bits := int32(8 * n.Type.Width)

if smallintconst(n) {
v := n.Int64()
v := n.Int64Val()
return 0 <= v && v < max
}

@ -3603,9 +3612,9 @@ func bounded(n *Node, max int64) bool {
case OAND:
v := int64(-1)
if smallintconst(n.Left) {
v = n.Left.Int64()
v = n.Left.Int64Val()
} else if smallintconst(n.Right) {
v = n.Right.Int64()
v = n.Right.Int64Val()
}

if 0 <= v && v < max {

@ -3614,7 +3623,7 @@ func bounded(n *Node, max int64) bool {

case OMOD:
if !sign && smallintconst(n.Right) {
v := n.Right.Int64()
v := n.Right.Int64Val()
if 0 <= v && v <= max {
return true
}

@ -3622,7 +3631,7 @@ func bounded(n *Node, max int64) bool {

case ODIV:
if !sign && smallintconst(n.Right) {
v := n.Right.Int64()
v := n.Right.Int64Val()
for bits > 0 && v >= 2 {
bits--
v >>= 1

@ -3631,7 +3640,7 @@ func bounded(n *Node, max int64) bool {

case ORSH:
if !sign && smallintconst(n.Right) {
v := n.Right.Int64()
v := n.Right.Int64Val()
if v > int64(bits) {
return true
}

@ -3687,6 +3696,8 @@ func usemethod(n *Node) {
// Also need to check for reflect package itself (see Issue #38515).
if s := res0.Type.Sym; s != nil && s.Name == "Method" && isReflectPkg(s.Pkg) {
Curfn.Func.SetReflectMethod(true)
// The LSym is initialized at this point. We need to set the attribute on the LSym.
Curfn.Func.lsym.Set(obj.AttrReflectMethod, true)
}
}

@ -3870,6 +3881,16 @@ func wrapCall(n *Node, init *Nodes) *Node {
}

isBuiltinCall := n.Op != OCALLFUNC && n.Op != OCALLMETH && n.Op != OCALLINTER

// Turn f(a, b, []T{c, d, e}...) back into f(a, b, c, d, e).
if !isBuiltinCall && n.IsDDD() {
last := n.List.Len() - 1
if va := n.List.Index(last); va.Op == OSLICELIT {
n.List.Set(append(n.List.Slice()[:last], va.List.Slice()...))
n.SetIsDDD(false)
}
}

// origArgs keeps track of what argument is uintptr-unsafe/unsafe-uintptr conversion.
origArgs := make([]*Node, n.List.Len())
t := nod(OTFUNC, nil, nil)

@ -19,6 +19,7 @@ import (
"strconv"
"strings"
"sync"
"unicode"
)

// This implements (non)optimization logging for -json option to the Go compiler

@ -223,11 +224,11 @@ type Diagnostic struct {
// A LoggedOpt is what the compiler produces and accumulates,
// to be converted to JSON for human or IDE consumption.
type LoggedOpt struct {
pos src.XPos // Source code position at which the event occurred. If it is inlined, outer and all inlined locations will appear in JSON.
pass string // For human/adhoc consumption; does not appear in JSON (yet)
fname string // For human/adhoc consumption; does not appear in JSON (yet)
what string // The (non) optimization; "nilcheck", "boundsCheck", "inline", "noInline"
target []interface{} // Optional target(s) or parameter(s) of "what" -- what was inlined, why it was not, size of copy, etc. 1st is most important/relevant.
pos src.XPos // Source code position at which the event occurred. If it is inlined, outer and all inlined locations will appear in JSON.
compilerPass string // Compiler pass. For human/adhoc consumption; does not appear in JSON (yet)
functionName string // Function name. For human/adhoc consumption; does not appear in JSON (yet)
what string // The (non) optimization; "nilcheck", "boundsCheck", "inline", "noInline"
target []interface{} // Optional target(s) or parameter(s) of "what" -- what was inlined, why it was not, size of copy, etc. 1st is most important/relevant.
}

type logFormat uint8

@ -240,12 +241,13 @@ const (
var Format = None
var dest string

// LogJsonOption parses and validates the version,directory value attached to the -json compiler flag.
func LogJsonOption(flagValue string) {
version, directory := parseLogFlag("json", flagValue)
if version != 0 {
log.Fatal("-json version must be 0")
}
checkLogPath("json", directory)
dest = checkLogPath(directory)
Format = Json0
}

@ -268,51 +270,80 @@ func parseLogFlag(flag, value string) (version int, directory string) {
return
}

// isWindowsDriveURI returns true if the file URI is of the format used by
// Windows URIs. The url.Parse package does not specially handle Windows paths
// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:").
// (copied from tools/internal/span/uri.go)
// this is less comprehensive than the processing in filepath.IsAbs on Windows.
func isWindowsDriveURIPath(uri string) bool {
if len(uri) < 4 {
return false
}
return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
}

func parseLogPath(destination string) (string, string) {
if filepath.IsAbs(destination) {
return filepath.Clean(destination), ""
}
if strings.HasPrefix(destination, "file://") { // IKWIAD, or Windows C:\foo\bar\baz
uri, err := url.Parse(destination)
if err != nil {
return "", fmt.Sprintf("optimizer logging destination looked like file:// URI but failed to parse: err=%v", err)
}
destination = uri.Host + uri.Path
if isWindowsDriveURIPath(destination) {
// strip leading / from /C:
// unlike tools/internal/span/uri.go, do not uppercase the drive letter -- let filepath.Clean do what it does.
destination = destination[1:]
}
return filepath.Clean(destination), ""
}
return "", fmt.Sprintf("optimizer logging destination %s was neither %s-prefixed directory nor file://-prefixed file URI", destination, string(filepath.Separator))
}

// checkLogPath does superficial early checking of the string specifying
// the directory to which optimizer logging is directed, and if
// it passes the test, stores the string in LO_dir
func checkLogPath(flag, destination string) {
sep := string(os.PathSeparator)
if strings.HasPrefix(destination, "/") || strings.HasPrefix(destination, sep) {
err := os.MkdirAll(destination, 0755)
if err != nil {
log.Fatalf("optimizer logging destination '<version>,<directory>' but could not create <directory>: err=%v", err)
}
} else if strings.HasPrefix(destination, "file://") { // IKWIAD, or Windows C:\foo\bar\baz
uri, err := url.Parse(destination)
if err != nil {
log.Fatalf("optimizer logging destination looked like file:// URI but failed to parse: err=%v", err)
}
destination = uri.Host + uri.Path
err = os.MkdirAll(destination, 0755)
if err != nil {
log.Fatalf("optimizer logging destination '<version>,<directory>' but could not create %s: err=%v", destination, err)
}
} else {
log.Fatalf("optimizer logging destination %s was neither %s-prefixed directory nor file://-prefixed file URI", destination, sep)
func checkLogPath(destination string) string {
path, complaint := parseLogPath(destination)
if complaint != "" {
log.Fatalf(complaint)
}
dest = destination
err := os.MkdirAll(path, 0755)
if err != nil {
log.Fatalf("optimizer logging destination '<version>,<directory>' but could not create <directory>: err=%v", err)
}
return path
}
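A standalone, simplified sketch of the destination handling that parseLogPath now centralizes — absolute paths and file:// URIs are accepted, anything else earns a complaint (the Windows drive-URI special case is omitted, and sketchParseLogPath is an illustrative name, not part of the package):

package main

import (
	"fmt"
	"net/url"
	"path/filepath"
	"strings"
)

func sketchParseLogPath(destination string) (string, string) {
	if filepath.IsAbs(destination) {
		return filepath.Clean(destination), ""
	}
	if strings.HasPrefix(destination, "file://") {
		uri, err := url.Parse(destination)
		if err != nil {
			return "", fmt.Sprintf("failed to parse file:// URI: err=%v", err)
		}
		return filepath.Clean(uri.Host + uri.Path), ""
	}
	return "", "destination was neither an absolute path nor a file:// URI"
}

func main() {
	for _, d := range []string{"/tmp/logopt", "file:///tmp/logopt", "relative/dir"} {
		path, complaint := sketchParseLogPath(d)
		fmt.Printf("%-20s -> path=%q complaint=%q\n", d, path, complaint)
	}
}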
var loggedOpts []*LoggedOpt
var mu = sync.Mutex{} // mu protects loggedOpts.

func NewLoggedOpt(pos src.XPos, what, pass, fname string, args ...interface{}) *LoggedOpt {
// NewLoggedOpt allocates a new LoggedOpt, to later be passed to either NewLoggedOpt or LogOpt as "args".
// Pos is the source position (including inlining), what is the message, pass is which pass created the message,
// funcName is the name of the function.
// A typical use for this is to accumulate an explanation for a missed optimization, for example, why did something escape?
func NewLoggedOpt(pos src.XPos, what, pass, funcName string, args ...interface{}) *LoggedOpt {
pass = strings.Replace(pass, " ", "_", -1)
return &LoggedOpt{pos, pass, fname, what, args}
return &LoggedOpt{pos, pass, funcName, what, args}
}

func LogOpt(pos src.XPos, what, pass, fname string, args ...interface{}) {
// LogOpt logs information about a (usually missed) optimization performed by the compiler.
// Pos is the source position (including inlining), what is the message, pass is which pass created the message,
// funcName is the name of the function.
func LogOpt(pos src.XPos, what, pass, funcName string, args ...interface{}) {
if Format == None {
return
}
lo := NewLoggedOpt(pos, what, pass, fname, args...)
lo := NewLoggedOpt(pos, what, pass, funcName, args...)
mu.Lock()
defer mu.Unlock()
// Because of concurrent calls from back end, no telling what the order will be, but is stable-sorted by outer Pos before use.
loggedOpts = append(loggedOpts, lo)
}

// Enabled returns whether optimization logging is enabled.
func Enabled() bool {
switch Format {
case None:

@ -459,11 +490,13 @@ func FlushLoggedOpts(ctxt *obj.Link, slashPkgPath string) {
}
}

// newPointRange returns a single-position Range for the compiler source location p.
func newPointRange(p src.Pos) Range {
return Range{Start: Position{p.Line(), p.Col()},
End: Position{p.Line(), p.Col()}}
}

// newLocation returns the Location for the compiler source location p
func newLocation(p src.Pos) Location {
loc := Location{URI: uriIfy(uprootedPath(p.Filename())), Range: newPointRange(p)}
return loc

@ -55,6 +55,34 @@ func wantN(t *testing.T, out string, desired string, n int) {
}
}

func TestPathStuff(t *testing.T) {
sep := string(filepath.Separator)
if path, whine := parseLogPath("file:///c:foo"); path != "c:foo" || whine != "" { // good path
t.Errorf("path='%s', whine='%s'", path, whine)
}
if path, whine := parseLogPath("file:///foo"); path != sep+"foo" || whine != "" { // good path
t.Errorf("path='%s', whine='%s'", path, whine)
}
if path, whine := parseLogPath("foo"); path != "" || whine == "" { // BAD path
t.Errorf("path='%s', whine='%s'", path, whine)
}
if sep == "\\" { // On WINDOWS ONLY
if path, whine := parseLogPath("C:/foo"); path != "C:\\foo" || whine != "" { // good path
t.Errorf("path='%s', whine='%s'", path, whine)
}
if path, whine := parseLogPath("c:foo"); path != "" || whine == "" { // BAD path
t.Errorf("path='%s', whine='%s'", path, whine)
}
if path, whine := parseLogPath("/foo"); path != "" || whine == "" { // BAD path
t.Errorf("path='%s', whine='%s'", path, whine)
}
} else { // ON UNIX ONLY
if path, whine := parseLogPath("/foo"); path != sep+"foo" || whine != "" { // good path
t.Errorf("path='%s', whine='%s'", path, whine)
}
}
}

func TestLogOpt(t *testing.T) {
t.Parallel()

@ -180,12 +208,11 @@ func s15a8(x *[15]int64) [15]int64 {
`"relatedInformation":[{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":4,"character":11},"end":{"line":4,"character":11}}},"message":"inlineLoc"}]}`)
want(t, slogged, `{"range":{"start":{"line":11,"character":6},"end":{"line":11,"character":6}},"severity":3,"code":"isInBounds","source":"go compiler","message":""}`)
want(t, slogged, `{"range":{"start":{"line":7,"character":6},"end":{"line":7,"character":6}},"severity":3,"code":"canInlineFunction","source":"go compiler","message":"cost: 35"}`)
want(t, slogged, `{"range":{"start":{"line":21,"character":21},"end":{"line":21,"character":21}},"severity":3,"code":"cannotInlineCall","source":"go compiler","message":"foo cannot be inlined (escaping closure variable)"}`)
// escape analysis explanation
want(t, slogged, `{"range":{"start":{"line":7,"character":13},"end":{"line":7,"character":13}},"severity":3,"code":"leak","source":"go compiler","message":"parameter z leaks to ~r2 with derefs=0",`+
`"relatedInformation":[`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: flow: y = z:"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from y = \u003cN\u003e (assign-pair)"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from y := z (assign-pair)"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: flow: ~r1 = y:"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":4,"character":11},"end":{"line":4,"character":11}}},"message":"inlineLoc"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from y.b (dot of pointer)"},`+

@ -570,9 +570,9 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
r1 := v.Args[0].Reg()
shifts := v.AuxInt
p := s.Prog(v.Op.Asm())
// clrlslwi ra,rs,sh,mb will become rlwinm ra,rs,sh,mb-sh,31-n as described in ISA
p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftsh(shifts)}
p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftmb(shifts)})
// clrlslwi ra,rs,mb,sh will become rlwinm ra,rs,sh,mb-sh,31-sh as described in ISA
p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftmb(shifts)}
p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftsh(shifts)})
p.Reg = r1
p.To.Type = obj.TYPE_REG
p.To.Reg = r

@ -582,9 +582,9 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
r1 := v.Args[0].Reg()
shifts := v.AuxInt
p := s.Prog(v.Op.Asm())
// clrlsldi ra,rs,sh,mb will become rldic ra,rs,sh,mb-sh
p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftsh(shifts)}
p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftmb(shifts)})
// clrlsldi ra,rs,mb,sh will become rldic ra,rs,sh,mb-sh
p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftmb(shifts)}
p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: ssa.GetPPC64Shiftsh(shifts)})
p.Reg = r1
p.To.Type = obj.TYPE_REG
p.To.Reg = r

@ -677,7 +677,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p.From.Reg = v.Args[0].Reg()

case ssa.OpPPC64ADDconst, ssa.OpPPC64ANDconst, ssa.OpPPC64ORconst, ssa.OpPPC64XORconst,
ssa.OpPPC64SRADconst, ssa.OpPPC64SRAWconst, ssa.OpPPC64SRDconst, ssa.OpPPC64SRWconst, ssa.OpPPC64SLDconst, ssa.OpPPC64SLWconst:
ssa.OpPPC64SRADconst, ssa.OpPPC64SRAWconst, ssa.OpPPC64SRDconst, ssa.OpPPC64SRWconst,
ssa.OpPPC64SLDconst, ssa.OpPPC64SLWconst, ssa.OpPPC64EXTSWSLconst, ssa.OpPPC64MULLWconst, ssa.OpPPC64MULLDconst:
p := s.Prog(v.Op.Asm())
p.Reg = v.Args[0].Reg()
p.From.Type = obj.TYPE_CONST

@ -47,6 +47,9 @@ func Compile(f *Func) {
stack := make([]byte, 16384)
n := runtime.Stack(stack, false)
stack = stack[:n]
if f.HTMLWriter != nil {
f.HTMLWriter.flushPhases()
}
f.Fatalf("panic during %s while compiling %s:\n\n%v\n\n%s\n", phaseName, f.Name, err, stack)
}
}()

@ -201,6 +204,13 @@ func (p *pass) addDump(s string) {
p.dump[s] = true
}

func (p *pass) String() string {
if p == nil {
return "nil pass"
}
return p.name
}

// Run consistency checker between each phase
var (
checkEnabled = false

@ -431,9 +441,9 @@ var passes = [...]pass{
{name: "nilcheckelim", fn: nilcheckelim},
{name: "prove", fn: prove},
{name: "early fuse", fn: fuseEarly},
{name: "expand calls", fn: expandCalls, required: true},
{name: "decompose builtin", fn: decomposeBuiltIn, required: true},
{name: "softfloat", fn: softfloat, required: true},
{name: "expand calls", fn: expandCalls, required: true},
{name: "late opt", fn: opt, required: true}, // TODO: split required rules and optimizing rules
{name: "dead auto elim", fn: elimDeadAutosGeneric},
{name: "generic deadcode", fn: deadcode, required: true}, // remove dead stores, which otherwise mess up store chain

@ -38,7 +38,6 @@ type Config struct {
useSSE bool // Use SSE for non-float operations
useAvg bool // Use optimizations that need Avg* operations
useHmul bool // Use optimizations that need Hmul* operations
use387 bool // GO386=387
SoftFloat bool //
Race bool // race detector enabled
NeedsFpScratch bool // No direct move between GP and FP register sets

@ -196,6 +195,14 @@ const (
ClassParamOut // return value
)

const go116lateCallExpansion = true

// LateCallExpansionEnabledWithin returns true if late call expansion should be tested
// within compilation of a function/method triggered by GOSSAHASH (defaults to "yes").
func LateCallExpansionEnabledWithin(f *Func) bool {
return go116lateCallExpansion && f.DebugTest // Currently set up for GOSSAHASH bug searches
}

// NewConfig returns a new configuration object for the given architecture.
func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config {
c := &Config{arch: arch, Types: types}

@ -379,9 +386,4 @@ func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config
return c
}

func (c *Config) Set387(b bool) {
c.NeedsFpScratch = b
c.use387 = b
}

func (c *Config) Ctxt() *obj.Link { return c.ctxt }

@ -4,14 +4,85 @@

package ssa

import "cmd/compile/internal/types"
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"sort"
)

// expandCalls converts LE (Late Expansion) calls that act like they receive value args into a lower-level form
// that is more oriented to a platform's ABI. The SelectN operations that extract results are also rewritten into
// more appropriate forms.
// that is more oriented to a platform's ABI. The SelectN operations that extract results are rewritten into
// more appropriate forms, and any StructMake or ArrayMake inputs are decomposed until non-struct values are
// reached (for now, Strings, Slices, Complex, and Interface are not decomposed because they are rewritten in
// a subsequent phase, but that may need to change for a register ABI in case one of those composite values is
// split between registers and memory).
//
// TODO: when it comes time to use registers, might want to include builtin selectors as well, but currently that happens in lower.
func expandCalls(f *Func) {
if !LateCallExpansionEnabledWithin(f) {
return
}
canSSAType := f.fe.CanSSA
regSize := f.Config.RegSize
sp, _ := f.spSb()

debug := f.pass.debug > 0

// For 32-bit, need to deal with decomposition of 64-bit integers
tUint32 := types.Types[types.TUINT32]
tInt32 := types.Types[types.TINT32]
var hiOffset, lowOffset int64
if f.Config.BigEndian {
lowOffset = 4
} else {
hiOffset = 4
}

// intPairTypes returns the pair of 32-bit int types needed to encode a 64-bit integer type on a target
// that has no 64-bit integer registers.
intPairTypes := func(et types.EType) (tHi, tLo *types.Type) {
tHi = tUint32
if et == types.TINT64 {
tHi = tInt32
}
tLo = tUint32
return
}
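A runnable sketch of the hiOffset/lowOffset setup above: a 64-bit integer on a 32-bit target occupies two 4-byte slots, and which half sits at the lower address depends on byte order:

package main

import "fmt"

// halfOffsets mirrors the BigEndian branch above: big endian puts the high
// word at offset 0, little endian puts the low word there.
func halfOffsets(bigEndian bool) (hiOffset, lowOffset int64) {
	if bigEndian {
		return 0, 4
	}
	return 4, 0
}

func main() {
	hi, lo := halfOffsets(false)
	fmt.Printf("little endian: hi at +%d, lo at +%d\n", hi, lo)
	hi, lo = halfOffsets(true)
	fmt.Printf("big endian:    hi at +%d, lo at +%d\n", hi, lo)
}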
// isAlreadyExpandedAggregateType returns whether a type is an SSA-able "aggregate" (multiple register) type
// that was expanded in an earlier phase (small user-defined arrays and structs, lowered in decomposeUser).
// Other aggregate types are expanded in decomposeBuiltin, which comes later.
isAlreadyExpandedAggregateType := func(t *types.Type) bool {
if !canSSAType(t) {
return false
}
return t.IsStruct() || t.IsArray() || regSize == 4 && t.Size() > 4 && t.IsInteger()
}

// removeTrivialWrapperTypes unwraps layers of
// struct { singleField SomeType } and [1]SomeType
// until a non-wrapper type is reached. This is useful
// for working with assignments to/from interface data
// fields (either second operand to OpIMake or OpIData)
// where the wrapping or type conversion can be elided
// because of type conversions/assertions in source code
// that do not appear in SSA.
removeTrivialWrapperTypes := func(t *types.Type) *types.Type {
for {
if t.IsStruct() && t.NumFields() == 1 {
t = t.Field(0).Type
continue
}
if t.IsArray() && t.NumElem() == 1 {
t = t.Elem()
continue
}
break
}
return t
}
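A runnable stand-in for removeTrivialWrapperTypes, modeling types as strings to show the unwrapping loop; the real closure walks *types.Type with IsStruct/NumFields and IsArray/NumElem:

package main

import "fmt"

// unwrap peels single-field structs and one-element arrays, mirroring the
// loop above: each layer is a "trivial wrapper" around its only member.
func unwrap(t string) string {
	wrappers := map[string]string{
		"[1]struct{ p unsafe.Pointer }": "struct{ p unsafe.Pointer }",
		"struct{ p unsafe.Pointer }":    "unsafe.Pointer",
	}
	for {
		inner, ok := wrappers[t]
		if !ok {
			return t
		}
		t = inner
	}
}

func main() {
	fmt.Println(unwrap("[1]struct{ p unsafe.Pointer }")) // unsafe.Pointer
}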

	// Calls that need lowering have some number of inputs, including a memory input,
	// and produce a tuple of (value1, value2, ..., mem) where valueK may or may not be SSA-able.

@@ -21,31 +92,285 @@ func expandCalls(f *Func) {
	// With the current ABI, the outputs need to be converted to loads, which will all use the call's
	// memory output as their input.

	// Step 1: find all references to calls as values and rewrite those.
	// rewriteSelect recursively walks leaf selector to a root (OpSelectN) through
	// a chain of Struct/Array Select operations. If the chain of selectors does not
	// end in OpSelectN, it does nothing (this can happen depending on compiler phase ordering).
	// It emits the code necessary to implement the leaf select operation that leads to the call.
	// TODO when registers really arrive, must also decompose anything split across two registers or registers and memory.
	var rewriteSelect func(leaf *Value, selector *Value, offset int64)
	rewriteSelect = func(leaf *Value, selector *Value, offset int64) {
		switch selector.Op {
		case OpSelectN:
			// TODO these may be duplicated. Should memoize. Intermediate selectors will go dead, no worries there.
			call := selector.Args[0]
			aux := call.Aux.(*AuxCall)
			which := selector.AuxInt
			if which == aux.NResults() { // mem is after the results.
				// rewrite v as a Copy of call -- the replacement call will produce a mem.
				leaf.copyOf(call)
			} else {
				leafType := removeTrivialWrapperTypes(leaf.Type)
				pt := types.NewPtr(leafType)
				if canSSAType(leafType) {
					off := f.ConstOffPtrSP(pt, offset+aux.OffsetOfResult(which), sp)
					// Any selection right out of the arg area/registers has to be same Block as call, use call as mem input.
					if leaf.Block == call.Block {
						leaf.reset(OpLoad)
						leaf.SetArgs2(off, call)
						leaf.Type = leafType
					} else {
						w := call.Block.NewValue2(leaf.Pos, OpLoad, leafType, off, call)
						leaf.copyOf(w)
					}
				} else {
					panic("Should not have non-SSA-able OpSelectN")
				}
			}
		case OpStructSelect:
			w := selector.Args[0]
			if w.Type.Etype != types.TSTRUCT {
				fmt.Printf("Bad type for w:\nv=%v\nsel=%v\nw=%v\n,f=%s\n", leaf.LongString(), selector.LongString(), w.LongString(), f.Name)
			}
			rewriteSelect(leaf, w, offset+w.Type.FieldOff(int(selector.AuxInt)))

		case OpInt64Hi:
			w := selector.Args[0]
			rewriteSelect(leaf, w, offset+hiOffset)

		case OpInt64Lo:
			w := selector.Args[0]
			rewriteSelect(leaf, w, offset+lowOffset)

		case OpArraySelect:
			w := selector.Args[0]
			rewriteSelect(leaf, w, offset+selector.Type.Size()*selector.AuxInt)
		default:
			// Ignore dead ends; on 32-bit, these can occur running before decompose builtins.
		}
	}
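[Editor's note] For concreteness, a hypothetical selector chain and the rewrite it implies, in the SSA notation the rules files use (value numbers and offsets are illustrative):

    v1 = StaticLECall {f} ... mem   // call with an aggregate result
    v2 = SelectN [0] v1             // first result of the call
    v3 = StructSelect [1] v2        // field 1 of that result
    v4 = Int64Hi v3                 // high word, on a 32-bit target

rewriteSelect(v4, v4, 0) walks v4 -> v3 -> v2, accumulating hiOffset plus the field offset, and on reaching the OpSelectN root rewrites v4 as a Load from SP at resultOffset+fieldOffset+hiOffset, with the call itself as the memory argument.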

	// storeArg converts stores of SSA-able aggregate arguments (passed to a call) into a series of stores of
	// smaller types into individual parameter slots.
	// TODO when registers really arrive, must also decompose anything split across two registers or registers and memory.
	var storeArg func(pos src.XPos, b *Block, a *Value, t *types.Type, offset int64, mem *Value) *Value
	storeArg = func(pos src.XPos, b *Block, a *Value, t *types.Type, offset int64, mem *Value) *Value {
		switch a.Op {
		case OpArrayMake0, OpStructMake0:
			return mem
		case OpStructMake1, OpStructMake2, OpStructMake3, OpStructMake4:
			for i := 0; i < t.NumFields(); i++ {
				fld := t.Field(i)
				mem = storeArg(pos, b, a.Args[i], fld.Type, offset+fld.Offset, mem)
			}
			return mem
		case OpArrayMake1:
			return storeArg(pos, b, a.Args[0], t.Elem(), offset, mem)

		case OpInt64Make:
			tHi, tLo := intPairTypes(t.Etype)
			mem = storeArg(pos, b, a.Args[0], tHi, offset+hiOffset, mem)
			return storeArg(pos, b, a.Args[1], tLo, offset+lowOffset, mem)
		}
		dst := f.ConstOffPtrSP(types.NewPtr(t), offset, sp)
		x := b.NewValue3A(pos, OpStore, types.TypeMem, t, dst, a, mem)
		if debug {
			fmt.Printf("storeArg(%v) returns %s\n", a, x.LongString())
		}
		return x
	}
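[Editor's note] The recursion shape of storeArg is easy to see on ordinary types. A standalone sketch (hypothetical helper, using reflect as a stand-in for the compiler's type representation) that walks an aggregate and reports one leaf store per scalar component at its absolute offset:

    // storeFields mimics the decomposition: structs recurse per field at
    // offset+field.Offset, arrays per element, and scalars hit emit.
    func storeFields(base uintptr, t reflect.Type, emit func(off uintptr, k reflect.Kind)) {
        switch t.Kind() {
        case reflect.Struct:
            for i := 0; i < t.NumField(); i++ {
                f := t.Field(i)
                storeFields(base+f.Offset, f.Type, emit)
            }
        case reflect.Array:
            for i := 0; i < t.Len(); i++ {
                storeFields(base+uintptr(i)*t.Elem().Size(), t.Elem(), emit)
            }
        default:
            emit(base, t.Kind()) // one small store per scalar leaf
        }
    }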

	// offsetFrom creates an offset from a pointer, simplifying chained offsets and offsets from SP
	// TODO should also optimize offsets from SB?
	offsetFrom := func(dst *Value, offset int64, t *types.Type) *Value {
		pt := types.NewPtr(t)
		if offset == 0 && dst.Type == pt { // this is not actually likely
			return dst
		}
		if dst.Op != OpOffPtr {
			return dst.Block.NewValue1I(dst.Pos.WithNotStmt(), OpOffPtr, pt, offset, dst)
		}
		// Simplify OpOffPtr
		from := dst.Args[0]
		offset += dst.AuxInt
		if from == sp {
			return f.ConstOffPtrSP(pt, offset, sp)
		}
		return dst.Block.NewValue1I(dst.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
	}
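[Editor's note] In rule notation, the simplification performed here is the usual offset folding, with a special case for the stack pointer (offsets are illustrative):

    (OffPtr [a] (OffPtr [b] p))  becomes  (OffPtr [a+b] p)
    (OffPtr [a] (OffPtr [b] SP)) becomes  a shared ConstOffPtrSP [a+b] value

Folding into a single shared SP-relative constant keeps equal argument-slot addresses as one SSA value instead of a chain per store.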

	// splitStore converts a store of an SSA-able aggregate into a series of smaller stores, emitting
	// appropriate Struct/Array Select operations (which will soon go dead) to obtain the parts.
	var splitStore func(dst, src, mem, v *Value, t *types.Type, offset int64, firstStorePos src.XPos) *Value
	splitStore = func(dst, src, mem, v *Value, t *types.Type, offset int64, firstStorePos src.XPos) *Value {
		// TODO might be worth commoning up duplicate selectors, but since they go dead, maybe no point.
		pos := v.Pos.WithNotStmt()
		switch t.Etype {
		case types.TINT64, types.TUINT64:
			if t.Width == regSize {
				break
			}
			tHi, tLo := intPairTypes(t.Etype)
			sel := src.Block.NewValue1(pos, OpInt64Hi, tHi, src)
			mem = splitStore(dst, sel, mem, v, tHi, offset+hiOffset, firstStorePos)
			firstStorePos = firstStorePos.WithNotStmt()
			sel = src.Block.NewValue1(pos, OpInt64Lo, tLo, src)
			return splitStore(dst, sel, mem, v, tLo, offset+lowOffset, firstStorePos)

		case types.TARRAY:
			elt := t.Elem()
			if src.Op == OpIData && t.NumElem() == 1 && t.Width == regSize && elt.Width == regSize {
				t = removeTrivialWrapperTypes(t)
				if t.Etype == types.TSTRUCT || t.Etype == types.TARRAY {
					f.Fatalf("Did not expect to find IDATA-immediate with non-trivial struct/array in it")
				}
				break // handle the leaf type.
			}
			for i := int64(0); i < t.NumElem(); i++ {
				sel := src.Block.NewValue1I(pos, OpArraySelect, elt, i, src)
				mem = splitStore(dst, sel, mem, v, elt, offset+i*elt.Width, firstStorePos)
				firstStorePos = firstStorePos.WithNotStmt()
			}
			return mem
		case types.TSTRUCT:
			if src.Op == OpIData && t.NumFields() == 1 && t.Field(0).Type.Width == t.Width && t.Width == regSize {
				// This peculiar test deals with accesses to immediate interface data.
				// It works okay because everything is the same size.
				// Example code that triggers this can be found in go/constant/value.go, function ToComplex
				// v119 (+881) = IData <intVal> v6
				// v121 (+882) = StaticLECall <floatVal,mem> {AuxCall{"".itof([intVal,0])[floatVal,8]}} [16] v119 v1
				// This corresponds to the generic rewrite rule "(StructSelect [0] (IData x)) => (IData x)"
				// Guard against "struct{struct{*foo}}"
				t = removeTrivialWrapperTypes(t)
				if t.Etype == types.TSTRUCT || t.Etype == types.TARRAY {
					f.Fatalf("Did not expect to find IDATA-immediate with non-trivial struct/array in it")
				}
				break // handle the leaf type.
			}
			for i := 0; i < t.NumFields(); i++ {
				fld := t.Field(i)
				sel := src.Block.NewValue1I(pos, OpStructSelect, fld.Type, int64(i), src)
				mem = splitStore(dst, sel, mem, v, fld.Type, offset+fld.Offset, firstStorePos)
				firstStorePos = firstStorePos.WithNotStmt()
			}
			return mem
		}
		// Default, including for aggregates whose single element exactly fills their container
		// TODO this will be a problem for cast interfaces containing floats when we move to registers.
		x := v.Block.NewValue3A(firstStorePos, OpStore, types.TypeMem, t, offsetFrom(dst, offset, t), src, mem)
		if debug {
			fmt.Printf("splitStore(%v, %v, %v, %v) returns %s\n", dst, src, mem, v, x.LongString())
		}
		return x
	}
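[Editor's note] The int64/uint64 case above is the 32-bit split: one 8-byte store becomes two 4-byte stores, with the high word at offset 0 or 4 depending on byte order (the hiOffset/lowOffset pair). A standalone sketch of the same layout decision (the function and its bigEndian flag are illustrative; byte order within each 4-byte word is fixed here for simplicity):

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    func store64(b []byte, v uint64, bigEndian bool) {
        hi, lo := uint32(v>>32), uint32(v)
        hiOff, loOff := 4, 0 // little-endian: high word at +4
        if bigEndian {
            hiOff, loOff = 0, 4
        }
        binary.LittleEndian.PutUint32(b[hiOff:], hi)
        binary.LittleEndian.PutUint32(b[loOff:], lo)
    }

    func main() {
        b := make([]byte, 8)
        store64(b, 0x1122334455667788, false)
        fmt.Printf("% x\n", b) // 88 77 66 55 44 33 22 11
    }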

	// rewriteArgs removes all the Args from a call and converts the call args into appropriate
	// stores (or later, register movement). Extra args for interface and closure calls are ignored,
	// but removed.
	rewriteArgs := func(v *Value, firstArg int) *Value {
		// Thread the stores on the memory arg
		aux := v.Aux.(*AuxCall)
		pos := v.Pos.WithNotStmt()
		m0 := v.Args[len(v.Args)-1]
		mem := m0
		for i, a := range v.Args {
			if i < firstArg {
				continue
			}
			if a == m0 { // mem is last.
				break
			}
			auxI := int64(i - firstArg)
			if a.Op == OpDereference {
				if a.MemoryArg() != m0 {
					f.Fatalf("Op...LECall and OpDereference have mismatched mem, %s and %s", v.LongString(), a.LongString())
				}
				// "Dereference" of addressed (probably not-SSA-eligible) value becomes Move
				// TODO this will be more complicated with registers in the picture.
				src := a.Args[0]
				dst := f.ConstOffPtrSP(src.Type, aux.OffsetOfArg(auxI), sp)
				if a.Uses == 1 && a.Block == v.Block {
					a.reset(OpMove)
					a.Pos = pos
					a.Type = types.TypeMem
					a.Aux = aux.TypeOfArg(auxI)
					a.AuxInt = aux.SizeOfArg(auxI)
					a.SetArgs3(dst, src, mem)
					mem = a
				} else {
					mem = v.Block.NewValue3A(pos, OpMove, types.TypeMem, aux.TypeOfArg(auxI), dst, src, mem)
					mem.AuxInt = aux.SizeOfArg(auxI)
				}
			} else {
				mem = storeArg(pos, v.Block, a, aux.TypeOfArg(auxI), aux.OffsetOfArg(auxI), mem)
			}
		}
		v.resetArgs()
		return mem
	}
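[Editor's note] A before/after sketch of the Dereference-to-Move rewrite, in SSA notation (names and offsets are illustrative):

    before:  v = StaticLECall {f} (Dereference a1 mem) mem
    after:   m1 = Move [sizeof(arg0)] (ConstOffPtrSP [argOffset] SP) a1 mem
             v  = StaticLECall {f} m1

A non-SSA-able argument arrives as an addressed value; the call sheds the Dereference and the argument travels to its parameter slot as a block copy threaded onto the memory chain.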

	// Step 0: rewrite the calls to convert incoming args to stores.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpSelectN:
				call := v.Args[0]
				aux := call.Aux.(*AuxCall)
				which := v.AuxInt
				t := v.Type
				if which == aux.NResults() { // mem is after the results.
					// rewrite v as a Copy of call -- the replacement call will produce a mem.
					v.copyOf(call)
				} else {
					pt := types.NewPtr(t)
					if canSSAType(t) {
						off := f.ConstOffPtrSP(pt, aux.OffsetOfResult(which), sp)
						v.reset(OpLoad)
						v.SetArgs2(off, call)
					} else {
						panic("Should not have non-SSA-able OpSelectN")
			case OpStaticLECall:
				mem := rewriteArgs(v, 0)
				v.SetArgs1(mem)
			case OpClosureLECall:
				code := v.Args[0]
				context := v.Args[1]
				mem := rewriteArgs(v, 2)
				v.SetArgs3(code, context, mem)
			case OpInterLECall:
				code := v.Args[0]
				mem := rewriteArgs(v, 1)
				v.SetArgs2(code, mem)
			}
		}
	}

	// Step 1: any stores of aggregates remaining are believed to be sourced from call results.
	// Decompose those stores into a series of smaller stores, adding selection ops as necessary.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op == OpStore {
				t := v.Aux.(*types.Type)
				if isAlreadyExpandedAggregateType(t) {
					dst, src, mem := v.Args[0], v.Args[1], v.Args[2]
					mem = splitStore(dst, src, mem, v, t, 0, v.Pos)
					v.copyOf(mem)
				}
			}
		}
	}

	val2Preds := make(map[*Value]int32) // Used to accumulate dependency graph of selection operations for topological ordering.

	// Step 2: accumulate selection operations for rewrite in topological order.
	// Any select-for-addressing applied to call results can be transformed directly.
	// TODO this is overkill; with the transformation of aggregate references into series of leaf references, it is only necessary to remember and recurse on the leaves.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			// Accumulate chains of selectors for processing in topological order
			switch v.Op {
			case OpStructSelect, OpArraySelect, OpInt64Hi, OpInt64Lo:
				w := v.Args[0]
				switch w.Op {
				case OpStructSelect, OpArraySelect, OpInt64Hi, OpInt64Lo, OpSelectN:
					val2Preds[w] += 1
					if debug {
						fmt.Printf("v2p[%s] = %d\n", w.LongString(), val2Preds[w])
					}
				}
				fallthrough
			case OpSelectN:
				if _, ok := val2Preds[v]; !ok {
					val2Preds[v] = 0
					if debug {
						fmt.Printf("v2p[%s] = %d\n", v.LongString(), val2Preds[v])
					}
				}
				v.Type = t // not right for the mem operand yet, but will be when call is rewritten.

			case OpSelectNAddr:
				// Do these directly, there are no chains of selectors.
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)

@@ -56,44 +381,72 @@ func expandCalls(f *Func) {
		}
	}

	// Step 2: rewrite the calls
	// Compilation must be deterministic
	var ordered []*Value
	less := func(i, j int) bool { return ordered[i].ID < ordered[j].ID }

	// Step 3: Rewrite in topological order. All chains of selectors end up in same block as the call.
	for len(val2Preds) > 0 {
		ordered = ordered[:0]
		for v, n := range val2Preds {
			if n == 0 {
				ordered = append(ordered, v)
			}
		}
		sort.Slice(ordered, less)
		for _, v := range ordered {
			for {
				w := v.Args[0]
				if debug {
					fmt.Printf("About to rewrite %s, args[0]=%s\n", v.LongString(), w.LongString())
				}
				delete(val2Preds, v)
				rewriteSelect(v, v, 0)
				v = w
				n, ok := val2Preds[v]
				if !ok {
					break
				}
				if n != 1 {
					val2Preds[v] = n - 1
					break
				}
				// Loop on new v; val2Preds[v] == 1 will be deleted in that iteration, no need to store zero.
			}
		}
	}
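[Editor's note] Steps 2 and 3 are a deterministic topological drain: nodes with zero still-pending selector users are processed in increasing ID order, and processing a node releases one pending use of its argument. A toy standalone model (names drain/preds/arg are illustrative, not from the commit):

    package main

    import (
        "fmt"
        "sort"
    )

    // drain mimics the loop above: preds[v] counts v's unprocessed
    // selector users, arg[v] is v's single argument in the chain.
    func drain(preds map[int]int, arg map[int]int) []int {
        var order []int
        for len(preds) > 0 {
            var ready []int
            for v, n := range preds {
                if n == 0 {
                    ready = append(ready, v)
                }
            }
            sort.Ints(ready) // compilation must be deterministic
            for _, v := range ready {
                order = append(order, v)
                delete(preds, v)
                if n, ok := preds[arg[v]]; ok && n > 0 {
                    preds[arg[v]] = n - 1
                }
            }
        }
        return order
    }

    func main() {
        // Two leaf selectors (3 and 4) both select from value 2 (a SelectN).
        preds := map[int]int{2: 2, 3: 0, 4: 0}
        arg := map[int]int{3: 2, 4: 2}
        fmt.Println(drain(preds, arg)) // [3 4 2]
    }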

	// Step 4: rewrite the calls themselves, correcting the type
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticLECall:
				// Thread the stores on the memory arg
				m0 := v.Args[len(v.Args)-1]
				mem := m0
				pos := v.Pos.WithNotStmt()
				aux := v.Aux.(*AuxCall)
				auxInt := v.AuxInt
				for i, a := range v.Args {
					if a == m0 {
						break
					}
					if a.Op == OpDereference {
						// "Dereference" of addressed (probably not-SSA-eligible) value becomes Move
						src := a.Args[0]
						dst := f.ConstOffPtrSP(src.Type, aux.OffsetOfArg(int64(i)), sp)
						a.reset(OpMove)
						a.Pos = pos
						a.Type = types.TypeMem
						a.Aux = aux.TypeOfArg(int64(i))
						a.AuxInt = aux.SizeOfArg(int64(i))
						a.SetArgs3(dst, src, mem)
						mem = a
					} else {
						// Add a new store.
						t := aux.TypeOfArg(int64(i))
						dst := f.ConstOffPtrSP(types.NewPtr(t), aux.OffsetOfArg(int64(i)), sp)
						mem = b.NewValue3A(pos, OpStore, types.TypeMem, t, dst, a, mem)
					}
				}
				v.reset(OpStaticCall)
				v.Op = OpStaticCall
				v.Type = types.TypeMem
				v.Aux = aux
				v.AuxInt = auxInt
				v.SetArgs1(mem)
			case OpClosureLECall:
				v.Op = OpClosureCall
				v.Type = types.TypeMem
			case OpInterLECall:
				v.Op = OpInterCall
				v.Type = types.TypeMem
			}
		}
	}

	// Step 5: elide any copies introduced.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			for i, a := range v.Args {
				if a.Op != OpCopy {
					continue
				}
				aa := copySource(a)
				v.SetArg(i, aa)
				for a.Uses == 0 {
					b := a.Args[0]
					a.reset(OpInvalid)
					a = b
				}
			}
		}
	}
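[Editor's note] Step 5's shape in miniature (a hypothetical model, not the compiler's types): follow Copy chains to the real source, then the retirement loop above turns any now-unused intermediate copies into Invalid ops.

    type val struct {
        op   string
        args []*val
        uses int
    }

    // copySource chases a chain Copy -> Copy -> ... -> x to its source x.
    func copySource(a *val) *val {
        for a.op == "Copy" {
            a = a.args[0]
        }
        return a
    }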

@@ -672,7 +672,7 @@ func (f *Func) Idom() []*Block {
	return f.cachedIdom
}

// sdom returns a sparse tree representing the dominator relationships
// Sdom returns a sparse tree representing the dominator relationships
// among the blocks of f.
func (f *Func) Sdom() SparseTree {
	if f.cachedSdom == nil {

@@ -38,10 +38,8 @@
(Xor(32|16|8) ...) => (XORL ...)

(Neg(32|16|8) ...) => (NEGL ...)
(Neg32F x) && !config.use387 => (PXOR x (MOVSSconst <typ.Float32> [float32(math.Copysign(0, -1))]))
(Neg64F x) && !config.use387 => (PXOR x (MOVSDconst <typ.Float64> [math.Copysign(0, -1)]))
(Neg32F x) && config.use387 => (FCHS x)
(Neg64F x) && config.use387 => (FCHS x)
(Neg32F x) => (PXOR x (MOVSSconst <typ.Float32> [float32(math.Copysign(0, -1))]))
(Neg64F x) => (PXOR x (MOVSDconst <typ.Float64> [math.Copysign(0, -1)]))

(Com(32|16|8) ...) => (NOTL ...)

@@ -670,8 +668,8 @@

// Merge load/store to op
((ADD|AND|OR|XOR|SUB|MUL)L x l:(MOVLload [off] {sym} ptr mem)) && canMergeLoadClobber(v, l, x) && clobber(l) => ((ADD|AND|OR|XOR|SUB|MUL)Lload x [off] {sym} ptr mem)
((ADD|SUB|MUL|DIV)SD x l:(MOVSDload [off] {sym} ptr mem)) && canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l) => ((ADD|SUB|MUL|DIV)SDload x [off] {sym} ptr mem)
((ADD|SUB|MUL|DIV)SS x l:(MOVSSload [off] {sym} ptr mem)) && canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l) => ((ADD|SUB|MUL|DIV)SSload x [off] {sym} ptr mem)
((ADD|SUB|MUL|DIV)SD x l:(MOVSDload [off] {sym} ptr mem)) && canMergeLoadClobber(v, l, x) && clobber(l) => ((ADD|SUB|MUL|DIV)SDload x [off] {sym} ptr mem)
((ADD|SUB|MUL|DIV)SS x l:(MOVSSload [off] {sym} ptr mem)) && canMergeLoadClobber(v, l, x) && clobber(l) => ((ADD|SUB|MUL|DIV)SSload x [off] {sym} ptr mem)
(MOVLstore {sym} [off] ptr y:((ADD|AND|OR|XOR)Lload x [off] {sym} ptr mem) mem) && y.Uses==1 && clobber(y) => ((ADD|AND|OR|XOR)Lmodify [off] {sym} ptr x mem)
(MOVLstore {sym} [off] ptr y:((ADD|SUB|AND|OR|XOR)L l:(MOVLload [off] {sym} ptr mem) x) mem) && y.Uses==1 && l.Uses==1 && clobber(y, l) =>
	((ADD|SUB|AND|OR|XOR)Lmodify [off] {sym} ptr x mem)
@@ -51,17 +51,6 @@ var regNames386 = []string{
	"SB",
}

// Notes on 387 support.
//  - The 387 has a weird stack-register setup for floating-point registers.
//    We use these registers when SSE registers are not available (when GO386=387).
//  - We use the same register names (X0-X7) but they refer to the 387
//    floating-point registers. That way, most of the SSA backend is unchanged.
//  - The instruction generation pass maintains an SSE->387 register mapping.
//    This mapping is updated whenever the FP stack is pushed or popped so that
//    we can always find a given SSE register even when the TOS pointer has changed.
//  - To facilitate the mapping from SSE to 387, we enforce that
//    every basic block starts and ends with an empty floating-point stack.

func init() {
	// Make map from reg names to reg integers.
	if len(regNames386) > 64 {

@@ -552,9 +541,6 @@ func init() {
		{name: "FlagGT_UGT"}, // signed > and unsigned <
		{name: "FlagGT_ULT"}, // signed > and unsigned >

		// Special op for -x on 387
		{name: "FCHS", argLength: 1, reg: fp11},

		// Special ops for PIC floating-point constants.
		// MOVSXconst1 loads the address of the constant-pool entry into a register.
		// MOVSXconst2 loads the constant from that address.

File diff suppressed because it is too large
@@ -65,17 +65,17 @@

// count trailing zero for ARMv5 and ARMv6
// 32 - CLZ(x&-x - 1)
(Ctz32 <t> x) && objabi.GOARM<=6 ->
(Ctz32 <t> x) && objabi.GOARM<=6 =>
	(RSBconst [32] (CLZ <t> (SUBconst <t> (AND <t> x (RSBconst <t> [0] x)) [1])))
(Ctz16 <t> x) && objabi.GOARM<=6 ->
(Ctz16 <t> x) && objabi.GOARM<=6 =>
	(RSBconst [32] (CLZ <t> (SUBconst <typ.UInt32> (AND <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x) (RSBconst <typ.UInt32> [0] (ORconst <typ.UInt32> [0x10000] x))) [1])))
(Ctz8 <t> x) && objabi.GOARM<=6 ->
(Ctz8 <t> x) && objabi.GOARM<=6 =>
	(RSBconst [32] (CLZ <t> (SUBconst <typ.UInt32> (AND <typ.UInt32> (ORconst <typ.UInt32> [0x100] x) (RSBconst <typ.UInt32> [0] (ORconst <typ.UInt32> [0x100] x))) [1])))

// count trailing zero for ARMv7
(Ctz32 <t> x) && objabi.GOARM==7 -> (CLZ <t> (RBIT <t> x))
(Ctz16 <t> x) && objabi.GOARM==7 -> (CLZ <t> (RBIT <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
(Ctz8 <t> x) && objabi.GOARM==7 -> (CLZ <t> (RBIT <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
(Ctz32 <t> x) && objabi.GOARM==7 => (CLZ <t> (RBIT <t> x))
(Ctz16 <t> x) && objabi.GOARM==7 => (CLZ <t> (RBIT <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
(Ctz8 <t> x) && objabi.GOARM==7 => (CLZ <t> (RBIT <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))

// bit length
(BitLen32 <t> x) => (RSBconst [32] (CLZ <t> x))
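[Editor's note] The ARMv5/v6 lowering above relies on the identity ctz(x) = 32 - clz((x & -x) - 1): x & -x isolates the lowest set bit, subtracting 1 turns it into a mask of exactly ctz(x) ones. A quick standalone check of the identity (not part of the commit):

    package main

    import (
        "fmt"
        "math/bits"
    )

    func main() {
        for _, x := range []uint32{1, 8, 12, 0x80000000} {
            viaClz := 32 - bits.LeadingZeros32(x&-x-1)
            fmt.Println(x, bits.TrailingZeros32(x), viaClz) // the two agree
        }
    }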
@@ -89,13 +89,13 @@
// t5 = x right rotate 8 bits -- (d, a, b, c )
// result = t4 ^ t5 -- (d, c, b, a )
// using shifted ops this can be done in 4 instructions.
(Bswap32 <t> x) && objabi.GOARM==5 ->
(Bswap32 <t> x) && objabi.GOARM==5 =>
	(XOR <t>
		(SRLconst <t> (BICconst <t> (XOR <t> x (SRRconst <t> [16] x)) [0xff0000]) [8])
		(SRRconst <t> x [8]))

// byte swap for ARMv6 and above
(Bswap32 x) && objabi.GOARM>=6 -> (REV x)
(Bswap32 x) && objabi.GOARM>=6 => (REV x)

// boolean ops -- booleans are represented with 0=false, 1=true
(AndB ...) => (AND ...)
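[Editor's note] A sanity check of the 4-instruction ARMv5 byte swap above, written out in Go (bswapARMv5 is an illustrative name; math/bits.RotateLeft32 with a negative count is a right rotate, matching SRRconst):

    package main

    import (
        "fmt"
        "math/bits"
    )

    func bswapARMv5(x uint32) uint32 {
        // XOR with the 16-bit rotation, clear byte 2 (the BICconst),
        // shift right 8, then XOR with the 8-bit rotation.
        t := (x ^ bits.RotateLeft32(x, -16)) &^ 0xff0000
        return (t >> 8) ^ bits.RotateLeft32(x, -8)
    }

    func main() {
        fmt.Printf("%#08x\n", bswapARMv5(0x11223344)) // 0x44332211
    }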
@@ -145,15 +145,15 @@

// constant shifts
// generic opt rewrites all constant shifts to shift by Const64
(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SLLconst x [c])
(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SRAconst x [c])
(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 -> (SRLconst x [c])
(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SLLconst x [c])
(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 -> (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SLLconst x [c])
(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 -> (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 => (SLLconst x [int32(c)])
(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 => (SRAconst x [int32(c)])
(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 => (SRLconst x [int32(c)])
(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 => (SLLconst x [int32(c)])
(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 => (SRAconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 => (SRLconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 => (SLLconst x [int32(c)])
(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 => (SRAconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])
(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 => (SRLconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])

// large constant shifts
(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 => (Const32 [0])
@@ -260,23 +260,23 @@
(Load <t> ptr mem) && is64BitFloat(t) => (MOVDload ptr mem)

// stores
(Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (MOVBstore ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (MOVHstore ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVFstore ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
(Store {t} ptr val mem) && t.Size() == 1 => (MOVBstore ptr val mem)
(Store {t} ptr val mem) && t.Size() == 2 => (MOVHstore ptr val mem)
(Store {t} ptr val mem) && t.Size() == 4 && !is32BitFloat(val.Type) => (MOVWstore ptr val mem)
(Store {t} ptr val mem) && t.Size() == 4 && is32BitFloat(val.Type) => (MOVFstore ptr val mem)
(Store {t} ptr val mem) && t.Size() == 8 && is64BitFloat(val.Type) => (MOVDstore ptr val mem)

// zero instructions
(Zero [0] _ mem) => mem
(Zero [1] ptr mem) => (MOVBstore ptr (MOVWconst [0]) mem)
(Zero [2] {t} ptr mem) && t.(*types.Type).Alignment()%2 == 0 ->
(Zero [2] {t} ptr mem) && t.Alignment()%2 == 0 =>
	(MOVHstore ptr (MOVWconst [0]) mem)
(Zero [2] ptr mem) ->
(Zero [2] ptr mem) =>
	(MOVBstore [1] ptr (MOVWconst [0])
		(MOVBstore [0] ptr (MOVWconst [0]) mem))
(Zero [4] {t} ptr mem) && t.(*types.Type).Alignment()%4 == 0 ->
(Zero [4] {t} ptr mem) && t.Alignment()%4 == 0 =>
	(MOVWstore ptr (MOVWconst [0]) mem)
(Zero [4] {t} ptr mem) && t.(*types.Type).Alignment()%2 == 0 ->
(Zero [4] {t} ptr mem) && t.Alignment()%2 == 0 =>
	(MOVHstore [2] ptr (MOVWconst [0])
		(MOVHstore [0] ptr (MOVWconst [0]) mem))
(Zero [4] ptr mem) =>
@@ -294,29 +294,29 @@
// 4 and 128 are magic constants, see runtime/mkduff.go
(Zero [s] {t} ptr mem)
	&& s%4 == 0 && s > 4 && s <= 512
	&& t.(*types.Type).Alignment()%4 == 0 && !config.noDuffDevice ->
	&& t.Alignment()%4 == 0 && !config.noDuffDevice =>
	(DUFFZERO [4 * (128 - s/4)] ptr (MOVWconst [0]) mem)

// Large zeroing uses a loop
(Zero [s] {t} ptr mem)
	&& (s > 512 || config.noDuffDevice) || t.(*types.Type).Alignment()%4 != 0 ->
	(LoweredZero [t.(*types.Type).Alignment()]
	&& (s > 512 || config.noDuffDevice) || t.Alignment()%4 != 0 =>
	(LoweredZero [t.Alignment()]
		ptr
		(ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)])
		(ADDconst <ptr.Type> ptr [int32(s-moveSize(t.Alignment(), config))])
		(MOVWconst [0])
		mem)

// moves
(Move [0] _ _ mem) => mem
(Move [1] dst src mem) => (MOVBstore dst (MOVBUload src mem) mem)
(Move [2] {t} dst src mem) && t.(*types.Type).Alignment()%2 == 0 ->
(Move [2] {t} dst src mem) && t.Alignment()%2 == 0 =>
	(MOVHstore dst (MOVHUload src mem) mem)
(Move [2] dst src mem) =>
	(MOVBstore [1] dst (MOVBUload [1] src mem)
		(MOVBstore dst (MOVBUload src mem) mem))
(Move [4] {t} dst src mem) && t.(*types.Type).Alignment()%4 == 0 ->
(Move [4] {t} dst src mem) && t.Alignment()%4 == 0 =>
	(MOVWstore dst (MOVWload src mem) mem)
(Move [4] {t} dst src mem) && t.(*types.Type).Alignment()%2 == 0 ->
(Move [4] {t} dst src mem) && t.Alignment()%2 == 0 =>
	(MOVHstore [2] dst (MOVHUload [2] src mem)
		(MOVHstore dst (MOVHUload src mem) mem))
(Move [4] dst src mem) =>
@@ -334,16 +334,16 @@
// 8 and 128 are magic constants, see runtime/mkduff.go
(Move [s] {t} dst src mem)
	&& s%4 == 0 && s > 4 && s <= 512
	&& t.(*types.Type).Alignment()%4 == 0 && !config.noDuffDevice && logLargeCopy(v, s) ->
	&& t.Alignment()%4 == 0 && !config.noDuffDevice && logLargeCopy(v, s) =>
	(DUFFCOPY [8 * (128 - s/4)] dst src mem)

// Large move uses a loop
(Move [s] {t} dst src mem)
	&& ((s > 512 || config.noDuffDevice) || t.(*types.Type).Alignment()%4 != 0) && logLargeCopy(v, s) ->
	(LoweredMove [t.(*types.Type).Alignment()]
	&& ((s > 512 || config.noDuffDevice) || t.Alignment()%4 != 0) && logLargeCopy(v, s) =>
	(LoweredMove [t.Alignment()]
		dst
		src
		(ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)])
		(ADDconst <src.Type> src [int32(s-moveSize(t.Alignment(), config))])
		mem)

// calls
@@ -432,31 +432,31 @@
(MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) => (MOVDstore [off1+off2] {sym} ptr val mem)
(MOVDstore [off1] {sym} (SUBconst [off2] ptr) val mem) => (MOVDstore [off1-off2] {sym} ptr val mem)

(MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
	(MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
(MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVBload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVBUload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVHload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVHUload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVWload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVFload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)
(MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) =>
	(MOVDload [off1+off2] {mergeSymTyped(sym1,sym2)} ptr mem)

(MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
	(MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
(MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
	(MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
(MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
	(MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
(MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
	(MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
(MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
	(MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
(MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) =>
	(MOVBstore [off1+off2] {mergeSymTyped(sym1,sym2)} ptr val mem)
(MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) =>
	(MOVHstore [off1+off2] {mergeSymTyped(sym1,sym2)} ptr val mem)
(MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) =>
	(MOVWstore [off1+off2] {mergeSymTyped(sym1,sym2)} ptr val mem)
(MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) =>
	(MOVFstore [off1+off2] {mergeSymTyped(sym1,sym2)} ptr val mem)
(MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) =>
	(MOVDstore [off1+off2] {mergeSymTyped(sym1,sym2)} ptr val mem)

// replace load from same location as preceding store with zero/sign extension (or copy in case of full width)
(MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) => (MOVBreg x)
@@ -728,40 +728,40 @@
(BICconst [c] _) && int32(c)==-1 => (MOVWconst [0])

// generic constant folding
(ADDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) -> (SUBconst [int64(int32(-c))] x)
(SUBconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) -> (ADDconst [int64(int32(-c))] x)
(ANDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) -> (BICconst [int64(int32(^uint32(c)))] x)
(BICconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) -> (ANDconst [int64(int32(^uint32(c)))] x)
(ADDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff -> (SUBconst [int64(int32(-c))] x)
(SUBconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff -> (ANDconst [int64(int32(-c))] x)
(ANDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff -> (BICconst [int64(int32(^uint32(c)))] x)
(BICconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff -> (ANDconst [int64(int32(^uint32(c)))] x)
(ADDconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(c+d))])
(ADDconst [c] (ADDconst [d] x)) -> (ADDconst [int64(int32(c+d))] x)
(ADDconst [c] (SUBconst [d] x)) -> (ADDconst [int64(int32(c-d))] x)
(ADDconst [c] (RSBconst [d] x)) -> (RSBconst [int64(int32(c+d))] x)
(ADCconst [c] (ADDconst [d] x) flags) -> (ADCconst [int64(int32(c+d))] x flags)
(ADCconst [c] (SUBconst [d] x) flags) -> (ADCconst [int64(int32(c-d))] x flags)
(SUBconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d-c))])
(SUBconst [c] (SUBconst [d] x)) -> (ADDconst [int64(int32(-c-d))] x)
(SUBconst [c] (ADDconst [d] x)) -> (ADDconst [int64(int32(-c+d))] x)
(SUBconst [c] (RSBconst [d] x)) -> (RSBconst [int64(int32(-c+d))] x)
(SBCconst [c] (ADDconst [d] x) flags) -> (SBCconst [int64(int32(c-d))] x flags)
(SBCconst [c] (SUBconst [d] x) flags) -> (SBCconst [int64(int32(c+d))] x flags)
(RSBconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(c-d))])
(RSBconst [c] (RSBconst [d] x)) -> (ADDconst [int64(int32(c-d))] x)
(RSBconst [c] (ADDconst [d] x)) -> (RSBconst [int64(int32(c-d))] x)
(RSBconst [c] (SUBconst [d] x)) -> (RSBconst [int64(int32(c+d))] x)
(RSCconst [c] (ADDconst [d] x) flags) -> (RSCconst [int64(int32(c-d))] x flags)
(RSCconst [c] (SUBconst [d] x) flags) -> (RSCconst [int64(int32(c+d))] x flags)
(SLLconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)<<uint64(c)))])
(SRLconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)>>uint64(c)))])
(SRAconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d)>>uint64(c))])
(MUL (MOVWconst [c]) (MOVWconst [d])) -> (MOVWconst [int64(int32(c*d))])
(MULA (MOVWconst [c]) (MOVWconst [d]) a) -> (ADDconst [int64(int32(c*d))] a)
(MULS (MOVWconst [c]) (MOVWconst [d]) a) -> (SUBconst [int64(int32(c*d))] a)
(Select0 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) -> (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
(Select1 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) -> (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
(ADDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) => (SUBconst [-c] x)
(SUBconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) => (ADDconst [-c] x)
(ANDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) => (BICconst [int32(^uint32(c))] x)
(BICconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) => (ANDconst [int32(^uint32(c))] x)
(ADDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff => (SUBconst [-c] x)
(SUBconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff => (ADDconst [-c] x)
(ANDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff => (BICconst [int32(^uint32(c))] x)
(BICconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff => (ANDconst [int32(^uint32(c))] x)
(ADDconst [c] (MOVWconst [d])) => (MOVWconst [c+d])
(ADDconst [c] (ADDconst [d] x)) => (ADDconst [c+d] x)
(ADDconst [c] (SUBconst [d] x)) => (ADDconst [c-d] x)
(ADDconst [c] (RSBconst [d] x)) => (RSBconst [c+d] x)
(ADCconst [c] (ADDconst [d] x) flags) => (ADCconst [c+d] x flags)
(ADCconst [c] (SUBconst [d] x) flags) => (ADCconst [c-d] x flags)
(SUBconst [c] (MOVWconst [d])) => (MOVWconst [d-c])
(SUBconst [c] (SUBconst [d] x)) => (ADDconst [-c-d] x)
(SUBconst [c] (ADDconst [d] x)) => (ADDconst [-c+d] x)
(SUBconst [c] (RSBconst [d] x)) => (RSBconst [-c+d] x)
(SBCconst [c] (ADDconst [d] x) flags) => (SBCconst [c-d] x flags)
(SBCconst [c] (SUBconst [d] x) flags) => (SBCconst [c+d] x flags)
(RSBconst [c] (MOVWconst [d])) => (MOVWconst [c-d])
(RSBconst [c] (RSBconst [d] x)) => (ADDconst [c-d] x)
(RSBconst [c] (ADDconst [d] x)) => (RSBconst [c-d] x)
(RSBconst [c] (SUBconst [d] x)) => (RSBconst [c+d] x)
(RSCconst [c] (ADDconst [d] x) flags) => (RSCconst [c-d] x flags)
(RSCconst [c] (SUBconst [d] x) flags) => (RSCconst [c+d] x flags)
(SLLconst [c] (MOVWconst [d])) => (MOVWconst [d<<uint64(c)])
(SRLconst [c] (MOVWconst [d])) => (MOVWconst [int32(uint32(d)>>uint64(c))])
(SRAconst [c] (MOVWconst [d])) => (MOVWconst [d>>uint64(c)])
(MUL (MOVWconst [c]) (MOVWconst [d])) => (MOVWconst [c*d])
(MULA (MOVWconst [c]) (MOVWconst [d]) a) => (ADDconst [c*d] a)
(MULS (MOVWconst [c]) (MOVWconst [d]) a) => (SUBconst [c*d] a)
(Select0 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) => (MOVWconst [int32(uint32(c)/uint32(d))])
(Select1 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) => (MOVWconst [int32(uint32(c)%uint32(d))])
(ANDconst [c] (MOVWconst [d])) => (MOVWconst [c&d])
(ANDconst [c] (ANDconst [d] x)) => (ANDconst [c&d] x)
(ORconst [c] (MOVWconst [d])) => (MOVWconst [c|d])
@@ -769,16 +769,16 @@
(XORconst [c] (MOVWconst [d])) => (MOVWconst [c^d])
(XORconst [c] (XORconst [d] x)) => (XORconst [c^d] x)
(BICconst [c] (MOVWconst [d])) => (MOVWconst [d&^c])
(BICconst [c] (BICconst [d] x)) -> (BICconst [int64(int32(c|d))] x)
(BICconst [c] (BICconst [d] x)) => (BICconst [c|d] x)
(MVN (MOVWconst [c])) => (MOVWconst [^c])
(MOVBreg (MOVWconst [c])) -> (MOVWconst [int64(int8(c))])
(MOVBUreg (MOVWconst [c])) -> (MOVWconst [int64(uint8(c))])
(MOVHreg (MOVWconst [c])) -> (MOVWconst [int64(int16(c))])
(MOVHUreg (MOVWconst [c])) -> (MOVWconst [int64(uint16(c))])
(MOVBreg (MOVWconst [c])) => (MOVWconst [int32(int8(c))])
(MOVBUreg (MOVWconst [c])) => (MOVWconst [int32(uint8(c))])
(MOVHreg (MOVWconst [c])) => (MOVWconst [int32(int16(c))])
(MOVHUreg (MOVWconst [c])) => (MOVWconst [int32(uint16(c))])
(MOVWreg (MOVWconst [c])) => (MOVWconst [c])
// BFX: Width = c >> 8, LSB = c & 0xff, result = d << (32 - Width - LSB) >> (32 - Width)
(BFX [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])
(BFXU [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))))])
(BFX [c] (MOVWconst [d])) => (MOVWconst [d<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))])
(BFXU [c] (MOVWconst [d])) => (MOVWconst [int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])

// absorb shifts into ops
(ADD x (SLLconst [c] y)) => (ADDshiftLL x y [c])
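[Editor's note] A worked example of the BFX encoding folded above: the field width lives in bits 8 and up of the aux constant, the least significant bit of the field in the low byte, and a signed extract is a left shift followed by an arithmetic right shift. Standalone check (bfx is an illustrative name):

    package main

    import "fmt"

    func bfx(c, d int32) int32 {
        width, lsb := uint32(c>>8), uint32(c&0xff)
        return d << (32 - width - lsb) >> (32 - width)
    }

    func main() {
        // Extract 4 bits starting at bit 8 of 0xABCD: the field is 0xB,
        // sign-extended to -5 because its top bit is set.
        fmt.Println(bfx(4<<8|8, 0xABCD)) // -5
    }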
@@ -1011,61 +1011,61 @@
(CMNshiftRAreg (MOVWconst [c]) x y) => (CMNconst [c] (SRA <x.Type> x y))

// constant folding in *shift ops
(ADDshiftLL x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(uint32(c)<<uint64(d)))])
(ADDshiftRL x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(uint32(c)>>uint64(d)))])
(ADDshiftRA x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(c)>>uint64(d))])
(ADCshiftLL x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
(ADCshiftRL x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
(ADCshiftRA x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(c)>>uint64(d))] flags)
(ADDSshiftLL x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(uint32(c)<<uint64(d)))])
(ADDSshiftRL x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(uint32(c)>>uint64(d)))])
(ADDSshiftRA x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(c)>>uint64(d))])
(SUBshiftLL x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(uint32(c)<<uint64(d)))])
(SUBshiftRL x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(uint32(c)>>uint64(d)))])
(SUBshiftRA x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(c)>>uint64(d))])
(SBCshiftLL x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
(SBCshiftRL x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
(SBCshiftRA x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(c)>>uint64(d))] flags)
(SUBSshiftLL x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(uint32(c)<<uint64(d)))])
(SUBSshiftRL x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(uint32(c)>>uint64(d)))])
(SUBSshiftRA x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(c)>>uint64(d))])
(RSBshiftLL x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(uint32(c)<<uint64(d)))])
(RSBshiftRL x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(uint32(c)>>uint64(d)))])
(RSBshiftRA x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(c)>>uint64(d))])
(RSCshiftLL x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
(RSCshiftRL x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
(RSCshiftRA x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(c)>>uint64(d))] flags)
(RSBSshiftLL x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(uint32(c)<<uint64(d)))])
(RSBSshiftRL x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(uint32(c)>>uint64(d)))])
(RSBSshiftRA x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(c)>>uint64(d))])
(ANDshiftLL x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(uint32(c)<<uint64(d)))])
(ANDshiftRL x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(uint32(c)>>uint64(d)))])
(ANDshiftRA x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(c)>>uint64(d))])
(ORshiftLL x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(uint32(c)<<uint64(d)))])
(ORshiftRL x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(uint32(c)>>uint64(d)))])
(ORshiftRA x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(c)>>uint64(d))])
(XORshiftLL x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)<<uint64(d)))])
(XORshiftRL x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)>>uint64(d)))])
(XORshiftRA x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(c)>>uint64(d))])
(XORshiftRR x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d)))])
(BICshiftLL x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(uint32(c)<<uint64(d)))])
(BICshiftRL x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(uint32(c)>>uint64(d)))])
(BICshiftRA x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(c)>>uint64(d))])
(MVNshiftLL (MOVWconst [c]) [d]) -> (MOVWconst [^int64(uint32(c)<<uint64(d))])
(ADDshiftLL x (MOVWconst [c]) [d]) => (ADDconst x [c<<uint64(d)])
(ADDshiftRL x (MOVWconst [c]) [d]) => (ADDconst x [int32(uint32(c)>>uint64(d))])
(ADDshiftRA x (MOVWconst [c]) [d]) => (ADDconst x [c>>uint64(d)])
(ADCshiftLL x (MOVWconst [c]) [d] flags) => (ADCconst x [c<<uint64(d)] flags)
(ADCshiftRL x (MOVWconst [c]) [d] flags) => (ADCconst x [int32(uint32(c)>>uint64(d))] flags)
(ADCshiftRA x (MOVWconst [c]) [d] flags) => (ADCconst x [c>>uint64(d)] flags)
(ADDSshiftLL x (MOVWconst [c]) [d]) => (ADDSconst x [c<<uint64(d)])
(ADDSshiftRL x (MOVWconst [c]) [d]) => (ADDSconst x [int32(uint32(c)>>uint64(d))])
(ADDSshiftRA x (MOVWconst [c]) [d]) => (ADDSconst x [c>>uint64(d)])
(SUBshiftLL x (MOVWconst [c]) [d]) => (SUBconst x [c<<uint64(d)])
(SUBshiftRL x (MOVWconst [c]) [d]) => (SUBconst x [int32(uint32(c)>>uint64(d))])
(SUBshiftRA x (MOVWconst [c]) [d]) => (SUBconst x [c>>uint64(d)])
(SBCshiftLL x (MOVWconst [c]) [d] flags) => (SBCconst x [c<<uint64(d)] flags)
(SBCshiftRL x (MOVWconst [c]) [d] flags) => (SBCconst x [int32(uint32(c)>>uint64(d))] flags)
(SBCshiftRA x (MOVWconst [c]) [d] flags) => (SBCconst x [c>>uint64(d)] flags)
(SUBSshiftLL x (MOVWconst [c]) [d]) => (SUBSconst x [c<<uint64(d)])
(SUBSshiftRL x (MOVWconst [c]) [d]) => (SUBSconst x [int32(uint32(c)>>uint64(d))])
(SUBSshiftRA x (MOVWconst [c]) [d]) => (SUBSconst x [c>>uint64(d)])
(RSBshiftLL x (MOVWconst [c]) [d]) => (RSBconst x [c<<uint64(d)])
(RSBshiftRL x (MOVWconst [c]) [d]) => (RSBconst x [int32(uint32(c)>>uint64(d))])
(RSBshiftRA x (MOVWconst [c]) [d]) => (RSBconst x [c>>uint64(d)])
(RSCshiftLL x (MOVWconst [c]) [d] flags) => (RSCconst x [c<<uint64(d)] flags)
(RSCshiftRL x (MOVWconst [c]) [d] flags) => (RSCconst x [int32(uint32(c)>>uint64(d))] flags)
(RSCshiftRA x (MOVWconst [c]) [d] flags) => (RSCconst x [c>>uint64(d)] flags)
(RSBSshiftLL x (MOVWconst [c]) [d]) => (RSBSconst x [c<<uint64(d)])
(RSBSshiftRL x (MOVWconst [c]) [d]) => (RSBSconst x [int32(uint32(c)>>uint64(d))])
(RSBSshiftRA x (MOVWconst [c]) [d]) => (RSBSconst x [c>>uint64(d)])
(ANDshiftLL x (MOVWconst [c]) [d]) => (ANDconst x [c<<uint64(d)])
(ANDshiftRL x (MOVWconst [c]) [d]) => (ANDconst x [int32(uint32(c)>>uint64(d))])
(ANDshiftRA x (MOVWconst [c]) [d]) => (ANDconst x [c>>uint64(d)])
(ORshiftLL x (MOVWconst [c]) [d]) => (ORconst x [c<<uint64(d)])
(ORshiftRL x (MOVWconst [c]) [d]) => (ORconst x [int32(uint32(c)>>uint64(d))])
(ORshiftRA x (MOVWconst [c]) [d]) => (ORconst x [c>>uint64(d)])
(XORshiftLL x (MOVWconst [c]) [d]) => (XORconst x [c<<uint64(d)])
(XORshiftRL x (MOVWconst [c]) [d]) => (XORconst x [int32(uint32(c)>>uint64(d))])
(XORshiftRA x (MOVWconst [c]) [d]) => (XORconst x [c>>uint64(d)])
(XORshiftRR x (MOVWconst [c]) [d]) => (XORconst x [int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d))])
(BICshiftLL x (MOVWconst [c]) [d]) => (BICconst x [c<<uint64(d)])
(BICshiftRL x (MOVWconst [c]) [d]) => (BICconst x [int32(uint32(c)>>uint64(d))])
(BICshiftRA x (MOVWconst [c]) [d]) => (BICconst x [c>>uint64(d)])
(MVNshiftLL (MOVWconst [c]) [d]) => (MOVWconst [^(c<<uint64(d))])
(MVNshiftRL (MOVWconst [c]) [d]) -> (MOVWconst [^int64(uint32(c)>>uint64(d))])
(MVNshiftRA (MOVWconst [c]) [d]) -> (MOVWconst [^int64(int32(c)>>uint64(d))])
(CMPshiftLL x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(uint32(c)<<uint64(d)))])
(CMPshiftRL x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(uint32(c)>>uint64(d)))])
(CMPshiftRA x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(c)>>uint64(d))])
(TSTshiftLL x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(uint32(c)<<uint64(d)))])
(TSTshiftRL x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(uint32(c)>>uint64(d)))])
(TSTshiftRA x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(c)>>uint64(d))])
(TEQshiftLL x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(uint32(c)<<uint64(d)))])
(TEQshiftRL x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(uint32(c)>>uint64(d)))])
(TEQshiftRA x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(c)>>uint64(d))])
(CMNshiftLL x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(uint32(c)<<uint64(d)))])
(CMNshiftRL x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(uint32(c)>>uint64(d)))])
(CMNshiftRA x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(c)>>uint64(d))])
(CMPshiftLL x (MOVWconst [c]) [d]) => (CMPconst x [c<<uint64(d)])
(CMPshiftRL x (MOVWconst [c]) [d]) => (CMPconst x [int32(uint32(c)>>uint64(d))])
(CMPshiftRA x (MOVWconst [c]) [d]) => (CMPconst x [c>>uint64(d)])
(TSTshiftLL x (MOVWconst [c]) [d]) => (TSTconst x [c<<uint64(d)])
(TSTshiftRL x (MOVWconst [c]) [d]) => (TSTconst x [int32(uint32(c)>>uint64(d))])
(TSTshiftRA x (MOVWconst [c]) [d]) => (TSTconst x [c>>uint64(d)])
(TEQshiftLL x (MOVWconst [c]) [d]) => (TEQconst x [c<<uint64(d)])
(TEQshiftRL x (MOVWconst [c]) [d]) => (TEQconst x [int32(uint32(c)>>uint64(d))])
(TEQshiftRA x (MOVWconst [c]) [d]) => (TEQconst x [c>>uint64(d)])
(CMNshiftLL x (MOVWconst [c]) [d]) => (CMNconst x [c<<uint64(d)])
(CMNshiftRL x (MOVWconst [c]) [d]) => (CMNconst x [int32(uint32(c)>>uint64(d))])
(CMNshiftRA x (MOVWconst [c]) [d]) => (CMNconst x [c>>uint64(d)])

(ADDshiftLLreg x y (MOVWconst [c])) => (ADDshiftLL x y [c])
(ADDshiftRLreg x y (MOVWconst [c])) => (ADDshiftRL x y [c])
@@ -1139,7 +1139,7 @@
// UBFX instruction is supported by ARMv6T2, ARMv7 and above versions, REV16 is supported by
// ARMv6 and above versions. So for ARMv6, we need to match SLLconst, SRLconst and ORshiftLL.
((ADDshiftLL|ORshiftLL|XORshiftLL) <typ.UInt16> [8] (BFXU <typ.UInt16> [int32(armBFAuxInt(8, 8))] x) x) => (REV16 x)
((ADDshiftLL|ORshiftLL|XORshiftLL) <typ.UInt16> [8] (SRLconst <typ.UInt16> [24] (SLLconst [16] x)) x) && objabi.GOARM>=6 -> (REV16 x)
((ADDshiftLL|ORshiftLL|XORshiftLL) <typ.UInt16> [8] (SRLconst <typ.UInt16> [24] (SLLconst [16] x)) x) && objabi.GOARM>=6 => (REV16 x)

// use indexed loads and stores
(MOVWload [0] {sym} (ADD ptr idx) mem) && sym == nil => (MOVWloadidx ptr idx mem)
@@ -1192,11 +1192,11 @@

(MOVWloadshiftLL ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(uint32(c)<<uint64(d))] ptr mem)
(MOVWloadshiftRL ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(uint32(c)>>uint64(d))] ptr mem)
(MOVWloadshiftRA ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(int32(c)>>uint64(d))] ptr mem)
(MOVWloadshiftRA ptr (MOVWconst [c]) [d] mem) => (MOVWload [c>>uint64(d)] ptr mem)

(MOVWstoreshiftLL ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(uint32(c)<<uint64(d))] ptr val mem)
(MOVWstoreshiftRL ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(uint32(c)>>uint64(d))] ptr val mem)
(MOVWstoreshiftRA ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(int32(c)>>uint64(d))] ptr val mem)
(MOVWstoreshiftRA ptr (MOVWconst [c]) [d] val mem) => (MOVWstore [c>>uint64(d)] ptr val mem)

// generic simplifications
(ADD x (RSBconst [0] y)) => (SUB x y)
@@ -1209,25 +1209,25 @@
(BIC x x) => (MOVWconst [0])

(ADD (MUL x y) a) => (MULA x y a)
(SUB a (MUL x y)) && objabi.GOARM == 7 -> (MULS x y a)
(RSB (MUL x y) a) && objabi.GOARM == 7 -> (MULS x y a)
(SUB a (MUL x y)) && objabi.GOARM == 7 => (MULS x y a)
(RSB (MUL x y) a) && objabi.GOARM == 7 => (MULS x y a)

(NEGF (MULF x y)) && objabi.GOARM >= 6 -> (NMULF x y)
(NEGD (MULD x y)) && objabi.GOARM >= 6 -> (NMULD x y)
(MULF (NEGF x) y) && objabi.GOARM >= 6 -> (NMULF x y)
(MULD (NEGD x) y) && objabi.GOARM >= 6 -> (NMULD x y)
(NEGF (MULF x y)) && objabi.GOARM >= 6 => (NMULF x y)
(NEGD (MULD x y)) && objabi.GOARM >= 6 => (NMULD x y)
(MULF (NEGF x) y) && objabi.GOARM >= 6 => (NMULF x y)
(MULD (NEGD x) y) && objabi.GOARM >= 6 => (NMULD x y)
(NMULF (NEGF x) y) => (MULF x y)
(NMULD (NEGD x) y) => (MULD x y)

// the result will overwrite the addend, since they are in the same register
(ADDF a (MULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULAF a x y)
(ADDF a (NMULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULSF a x y)
(ADDD a (MULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULAD a x y)
(ADDD a (NMULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULSD a x y)
(SUBF a (MULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULSF a x y)
(SUBF a (NMULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULAF a x y)
(SUBD a (MULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULSD a x y)
(SUBD a (NMULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 -> (MULAD a x y)
(ADDF a (MULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULAF a x y)
(ADDF a (NMULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULSF a x y)
(ADDD a (MULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULAD a x y)
(ADDD a (NMULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULSD a x y)
(SUBF a (MULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULSF a x y)
(SUBF a (NMULF x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULAF a x y)
(SUBD a (MULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULSD a x y)
(SUBD a (NMULD x y)) && a.Uses == 1 && objabi.GOARM >= 6 => (MULAD a x y)

(AND x (MVN y)) => (BIC x y)
@@ -1259,12 +1259,12 @@
(CMPD x (MOVDconst [0])) => (CMPD0 x)

// bit extraction
(SRAconst (SLLconst x [c]) [d]) && objabi.GOARM==7 && uint64(d)>=uint64(c) && uint64(d)<=31 -> (BFX [(d-c)|(32-d)<<8] x)
(SRLconst (SLLconst x [c]) [d]) && objabi.GOARM==7 && uint64(d)>=uint64(c) && uint64(d)<=31 -> (BFXU [(d-c)|(32-d)<<8] x)
(SRAconst (SLLconst x [c]) [d]) && objabi.GOARM==7 && uint64(d)>=uint64(c) && uint64(d)<=31 => (BFX [(d-c)|(32-d)<<8] x)
(SRLconst (SLLconst x [c]) [d]) && objabi.GOARM==7 && uint64(d)>=uint64(c) && uint64(d)<=31 => (BFXU [(d-c)|(32-d)<<8] x)

// comparison simplification
(CMP x (RSBconst [0] y)) => (CMN x y)
(CMN x (RSBconst [0] y)) => (CMP x y)
((LT|LE|EQ|NE|GE|GT) (CMP x (RSBconst [0] y))) => ((LT|LE|EQ|NE|GE|GT) (CMN x y)) // sense of carry bit not preserved
((LT|LE|EQ|NE|GE|GT) (CMN x (RSBconst [0] y))) => ((LT|LE|EQ|NE|GE|GT) (CMP x y)) // sense of carry bit not preserved
(EQ (CMPconst [0] l:(SUB x y)) yes no) && l.Uses==1 => (EQ (CMP x y) yes no)
(EQ (CMPconst [0] l:(MULS x y a)) yes no) && l.Uses==1 => (EQ (CMP a (MUL <x.Type> x y)) yes no)
(EQ (CMPconst [0] l:(SUBconst [c] x)) yes no) && l.Uses==1 => (EQ (CMPconst [c] x) yes no)
@@ -821,6 +821,8 @@

(ADDconst [c] (MOVDaddr [d] {sym} x)) && is32Bit(c+int64(d)) => (MOVDaddr [int32(c+int64(d))] {sym} x)

(MULL(W|D) x (MOVDconst [c])) && is16Bit(c) => (MULL(W|D)const [int32(c)] x)

// Subtract from (with carry, but ignored) constant.
// Note, these clobber the carry bit.
(SUB (MOVDconst [c]) x) && is32Bit(c) => (SUBFCconst [c] x)
@@ -1018,13 +1020,14 @@
(SLDconst [c] z:(MOVHZreg x)) && c < 16 && z.Uses == 1 => (CLRLSLDI [newPPC64ShiftAuxInt(c,48,63,64)] x)
(SLDconst [c] z:(MOVWZreg x)) && c < 32 && z.Uses == 1 => (CLRLSLDI [newPPC64ShiftAuxInt(c,32,63,64)] x)

(SLDconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLDconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLDconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d)) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLDconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(64-getPPC64ShiftMaskLength(d)) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLWconst [c] z:(MOVBZreg x)) && z.Uses == 1 && c < 8 => (CLRLSLWI [newPPC64ShiftAuxInt(c,24,31,32)] x)
(SLWconst [c] z:(MOVHZreg x)) && z.Uses == 1 && c < 16 => (CLRLSLWI [newPPC64ShiftAuxInt(c,16,31,32)] x)
(SLWconst [c] z:(MOVWZreg x)) && z.Uses == 1 && c < 24 => (CLRLSLWI [newPPC64ShiftAuxInt(c,8,31,32)] x)
(SLWconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
(SLWconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
(SLWconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d)) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
(SLWconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d)) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
// special case for power9
(SL(W|D)const [c] z:(MOVWreg x)) && c < 32 && objabi.GOPPC64 >= 9 => (EXTSWSLconst [c] x)

// Lose widening ops fed to stores
(MOVBstore [off] {sym} ptr (MOV(B|BZ|H|HZ|W|WZ)reg x) mem) => (MOVBstore [off] {sym} ptr x mem)
|||
|
|
@ -181,6 +181,8 @@ func init() {
|
|||
|
||||
{name: "MULLD", argLength: 2, reg: gp21, asm: "MULLD", typ: "Int64", commutative: true}, // arg0*arg1 (signed 64-bit)
|
||||
{name: "MULLW", argLength: 2, reg: gp21, asm: "MULLW", typ: "Int32", commutative: true}, // arg0*arg1 (signed 32-bit)
|
||||
{name: "MULLDconst", argLength: 1, reg: gp11, asm: "MULLD", aux: "Int32", typ: "Int64"}, // arg0*auxInt (signed 64-bit)
|
||||
{name: "MULLWconst", argLength: 1, reg: gp11, asm: "MULLW", aux: "Int32", typ: "Int64"}, // arg0*auxInt (signed 64-bit)
|
||||
{name: "MADDLD", argLength: 3, reg: gp31, asm: "MADDLD", typ: "Int64"}, // (arg0*arg1)+arg2 (signed 64-bit)
|
||||
|
||||
{name: "MULHD", argLength: 2, reg: gp21, asm: "MULHD", commutative: true}, // (arg0 * arg1) >> 64, signed
|
||||
|
|
@ -223,6 +225,7 @@ func init() {
|
|||
|
||||
{name: "ROTLconst", argLength: 1, reg: gp11, asm: "ROTL", aux: "Int64"}, // arg0 rotate left by auxInt bits
|
||||
{name: "ROTLWconst", argLength: 1, reg: gp11, asm: "ROTLW", aux: "Int64"}, // uint32(arg0) rotate left by auxInt bits
|
||||
{name: "EXTSWSLconst", argLength: 1, reg: gp11, asm: "EXTSWSLI", aux: "Int64"},
|
||||
|
||||
{name: "CNTLZD", argLength: 1, reg: gp11, asm: "CNTLZD", clobberFlags: true}, // count leading zeros
|
||||
{name: "CNTLZW", argLength: 1, reg: gp11, asm: "CNTLZW", clobberFlags: true}, // count leading zeros (32 bit)
|
||||
|
|
|
|||
|
|
@@ -1961,6 +1961,31 @@
	&& warnRule(fe.Debug_checknil(), v, "removed nil check")
	=> (Invalid)

// for late-expanded calls
(Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
	&& isSameCall(call.Aux, "runtime.newobject")
	=> mem

(Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
	&& isConstZero(x)
	&& isSameCall(call.Aux, "runtime.newobject")
	=> mem

(Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
	&& isConstZero(x)
	&& isSameCall(call.Aux, "runtime.newobject")
	=> mem

(NilCheck (SelectN [0] call:(StaticLECall _ _)) (SelectN [1] call))
	&& isSameCall(call.Aux, "runtime.newobject")
	&& warnRule(fe.Debug_checknil(), v, "removed nil check")
	=> (Invalid)

(NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) (SelectN [1] call))
	&& isSameCall(call.Aux, "runtime.newobject")
	&& warnRule(fe.Debug_checknil(), v, "removed nil check")
	=> (Invalid)
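A reader-level illustration (my code, not the compiler's) of what these newobject rules eliminate: runtime.newobject returns zeroed memory, so an explicit store of zero into the fresh allocation is dead, as is a nil check on its result.

	package main

	type point struct{ x, y int }

	func newPoint() *point {
		p := new(point) // runtime.newobject returns zeroed memory
		p.x = 0         // dead store: removed by the Store-of-zero rule above
		return p
	}

	func main() { _ = newPoint() }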
// Evaluate constant address comparisons.
(EqPtr x x) => (ConstBool [true])
(NeqPtr x x) => (ConstBool [false])
@@ -2017,6 +2042,17 @@
	&& clobber(s1, s2, s3)
	=> (Move {t.Elem()} [int64(sz)] dst src mem)

// Inline small or disjoint runtime.memmove calls with constant length.
// See the comment in op Move in genericOps.go for discussion of the type.
(SelectN [0] call:(StaticLECall {sym} dst src (Const(64|32) [sz]) mem))
	&& sz >= 0
	&& call.Uses == 1 // this will exclude all calls with results
	&& isSameCall(sym, "runtime.memmove")
	&& dst.Type.IsPtr() // avoids TUINTPTR, see issue 30061
	&& isInlinableMemmove(dst, src, int64(sz), config)
	&& clobber(call)
	=> (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
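A hedged illustration of what this rule buys (example is mine, not from the CL): a runtime.memmove whose length is a small compile-time constant, such as the one produced by copy over fixed-length slices, can be replaced by a single SSA Move.

	// copy8 lowers to a runtime.memmove of constant length 8; with the
	// rule above, that call can become one 8-byte Move (assuming the
	// target's isInlinableMemmove accepts the size).
	func copy8(dst, src []byte) {
		copy(dst[:8], src[:8])
	}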
// De-virtualize interface calls into static calls.
// Note that (ITab (IMake)) doesn't get
// rewritten until after the first opt pass,
@@ -2024,6 +2060,13 @@
(InterCall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) mem) && devirt(v, auxCall, itab, off) != nil =>
	(StaticCall [int32(argsize)] {devirt(v, auxCall, itab, off)} mem)

// De-virtualize late-expanded interface calls into late-expanded static calls.
// Note that (ITab (IMake)) doesn't get rewritten until after the first opt pass,
// so this rule should trigger reliably.
// devirtLECall removes the first argument, adds the devirtualized symbol to the AuxCall, and changes the opcode
(InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___) && devirtLESym(v, auxCall, itab, off) != nil => devirtLECall(v, devirtLESym(v, auxCall, itab, off))
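For orientation (my own sketch, not part of the diff): devirtualization applies when the concrete type stored in an interface is statically visible, as in:

	package main

	type greeter interface{ hello() string }

	type english struct{}

	func (english) hello() string { return "hi" }

	func main() {
		var g greeter = english{} // concrete type statically known
		_ = g.hello()             // candidate: InterLECall => StaticLECall to english.hello
	}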
// Move and Zero optimizations.
// Move source and destination may overlap.
@@ -2404,6 +2447,7 @@
	(Store {t5} (OffPtr <tt5> [o5] dst) d4
		(Zero {t1} [n] dst mem)))))

// TODO this does not fire before call expansion; is that acceptable?
(StaticCall {sym} x) && needRaceCleanup(sym, v) => x

// Collapse moving A -> B -> C into just A -> C.
@@ -389,10 +389,12 @@ var genericOps = []opData{
	// TODO(josharian): ClosureCall and InterCall should have Int32 aux
	// to match StaticCall's 32 bit arg size limit.
	// TODO(drchase,josharian): could the arg size limit be bundled into the rules for CallOff?
	{name: "ClosureCall", argLength: 3, aux: "CallOff", call: true}, // arg0=code pointer, arg1=context ptr, arg2=memory. auxint=arg size. Returns memory.
	{name: "StaticCall", argLength: 1, aux: "CallOff", call: true}, // call function aux.(*obj.LSym), arg0=memory. auxint=arg size. Returns memory.
	{name: "InterCall", argLength: 2, aux: "CallOff", call: true}, // interface call. arg0=code pointer, arg1=memory, auxint=arg size. Returns memory.
	{name: "StaticLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded static call function aux.(*ssa.AuxCall.Fn). arg0..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
	{name: "ClosureCall", argLength: 3, aux: "CallOff", call: true}, // arg0=code pointer, arg1=context ptr, arg2=memory. auxint=arg size. Returns memory.
	{name: "StaticCall", argLength: 1, aux: "CallOff", call: true}, // call function aux.(*obj.LSym), arg0=memory. auxint=arg size. Returns memory.
	{name: "InterCall", argLength: 2, aux: "CallOff", call: true}, // interface call. arg0=code pointer, arg1=memory, auxint=arg size. Returns memory.
	{name: "ClosureLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded closure call. arg0=code pointer, arg1=context ptr, arg2..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
	{name: "StaticLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded static call function aux.(*ssa.AuxCall.Fn). arg0..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
	{name: "InterLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded interface call. arg0=code pointer, arg1..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.

	// Conversions: signed extensions, zero (unsigned) extensions, truncations
	{name: "SignExt8to16", argLength: 1, typ: "Int16"},
@@ -539,10 +541,10 @@ var genericOps = []opData{
	{name: "SelectN", argLength: 1, aux: "Int64"}, // arg0=tuple, auxint=field index. Returns the auxint'th member.
	{name: "SelectNAddr", argLength: 1, aux: "Int64"}, // arg0=tuple, auxint=field index. Returns the address of auxint'th member. Used for un-SSA-able result types.

	// Atomic operations used for semantically inlining runtime/internal/atomic.
	// Atomic loads return a new memory so that the loads are properly ordered
	// with respect to other loads and stores.
	// TODO: use for sync/atomic at some point.
	// Atomic operations used for semantically inlining sync/atomic and
	// runtime/internal/atomic. Atomic loads return a new memory so that
	// the loads are properly ordered with respect to other loads and
	// stores.
	{name: "AtomicLoad8", argLength: 2, typ: "(UInt8,Mem)"}, // Load from arg0. arg1=memory. Returns loaded value and new memory.
	{name: "AtomicLoad32", argLength: 2, typ: "(UInt32,Mem)"}, // Load from arg0. arg1=memory. Returns loaded value and new memory.
	{name: "AtomicLoad64", argLength: 2, typ: "(UInt64,Mem)"}, // Load from arg0. arg1=memory. Returns loaded value and new memory.
@@ -50,8 +50,12 @@ import (
// variable ::= some token
// opcode ::= one of the opcodes from the *Ops.go files

// special rules: trailing ellipsis "..." (in the outermost sexpr?) must match on both sides of a rule.
// trailing three underscore "___" in the outermost match sexpr indicate the presence of
// extra ignored args that need not appear in the replacement

// extra conditions is just a chunk of Go that evaluates to a boolean. It may use
// variables declared in the matching sexpr. The variable "v" is predefined to be
// variables declared in the matching tsexpr. The variable "v" is predefined to be
// the value matched by the entire rule.

// If multiple rules match, the first one in file order is selected.
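An illustrative aside (mine, not part of the change): a match pattern can end in "___" to accept and discard any number of trailing arguments, as the InterLECall rule earlier in this diff does. A made-up rule in the same DSL, with hypothetical helpers someCond and someResult, would look like:

	(StaticLECall {sym} ptr ___) && someCond(sym) => someResult(v)

The arguments swallowed by "___" do not have to reappear on the right-hand side.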
@@ -1019,6 +1023,19 @@ func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int,
		pos = v + ".Pos"
	}

	// If the last argument is ___, it means "don't care about trailing arguments, really"
	// The likely/intended use is for rewrites that are too tricky to express in the existing pattern language
	// Do a length check early because long patterns fed short (ultimately not-matching) inputs will
	// do an indexing error in pattern-matching.
	if op.argLength == -1 {
		l := len(args)
		if l == 0 || args[l-1] != "___" {
			rr.add(breakf("len(%s.Args) != %d", v, l))
		} else if l > 1 && args[l-1] == "___" {
			rr.add(breakf("len(%s.Args) < %d", v, l-1))
		}
	}

	for _, e := range []struct {
		name, field, dclType string
	}{

@@ -1159,9 +1176,6 @@ func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int,
		}
	}

	if op.argLength == -1 {
		rr.add(breakf("len(%s.Args) != %d", v, len(args)))
	}
	return pos, checkOp
}
@@ -28,18 +28,23 @@ type HTMLWriter struct {
}

func NewHTMLWriter(path string, f *Func, cfgMask string) *HTMLWriter {
	path = strings.Replace(path, "/", string(filepath.Separator), -1)
	out, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
	if err != nil {
		f.Fatalf("%v", err)
	}
	pwd, err := os.Getwd()
	if err != nil {
		f.Fatalf("%v", err)
	reportPath := path
	if !filepath.IsAbs(reportPath) {
		pwd, err := os.Getwd()
		if err != nil {
			f.Fatalf("%v", err)
		}
		reportPath = filepath.Join(pwd, path)
	}
	html := HTMLWriter{
		w: out,
		Func: f,
		path: filepath.Join(pwd, path),
		path: reportPath,
		dot: newDotWriter(cfgMask),
	}
	html.start()
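As an aside, the NewHTMLWriter change above only prepends the working directory when the report path is relative. A standalone sketch of that pattern (my code, not the compiler's):

	package main

	import (
		"fmt"
		"os"
		"path/filepath"
	)

	// reportPath returns path unchanged if absolute, otherwise joins it
	// with the current working directory.
	func reportPath(path string) string {
		if filepath.IsAbs(path) {
			return path
		}
		pwd, err := os.Getwd()
		if err != nil {
			panic(err)
		}
		return filepath.Join(pwd, path)
	}

	func main() {
		fmt.Println(reportPath("ssa.html"))      // e.g. /home/user/ssa.html
		fmt.Println(reportPath("/tmp/ssa.html")) // unchanged
	}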
@@ -127,6 +127,17 @@ func (a *AuxCall) NResults() int64 {
	return int64(len(a.results))
}

// LateExpansionResultType returns the result type (including trailing mem)
// for a call that will be expanded later in the SSA phase.
func (a *AuxCall) LateExpansionResultType() *types.Type {
	var tys []*types.Type
	for i := int64(0); i < a.NResults(); i++ {
		tys = append(tys, a.TypeOfResult(i))
	}
	tys = append(tys, types.TypeMem)
	return types.NewResults(tys)
}

// NArgs returns the number of arguments
func (a *AuxCall) NArgs() int64 {
	return int64(len(a.args))
@@ -297,6 +308,13 @@ func makeValAndOff32(val, off int32) ValAndOff {
	return ValAndOff(int64(val)<<32 + int64(uint32(off)))
}

func makeValAndOff64(val, off int64) ValAndOff {
	if !validValAndOff(val, off) {
		panic("invalid makeValAndOff64")
	}
	return ValAndOff(val<<32 + int64(uint32(off)))
}

func (x ValAndOff) canAdd(off int64) bool {
	newoff := x.Off() + off
	return newoff == int64(int32(newoff))
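A quick worked example (mine) of the ValAndOff encoding used above: the value occupies the high 32 bits and the offset the low 32 bits.

	package main

	import "fmt"

	func main() {
		val, off := int64(5), int64(8)
		packed := val<<32 + int64(uint32(off))
		fmt.Printf("%#x\n", packed)            // 0x500000008
		fmt.Println(packed>>32, int32(packed)) // 5 8
	}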
@@ -536,7 +536,6 @@ const (
	Op386FlagLT_UGT
	Op386FlagGT_UGT
	Op386FlagGT_ULT
	Op386FCHS
	Op386MOVSSconst1
	Op386MOVSDconst1
	Op386MOVSSconst2
@@ -1833,6 +1832,8 @@ const (
	OpPPC64FSUBS
	OpPPC64MULLD
	OpPPC64MULLW
	OpPPC64MULLDconst
	OpPPC64MULLWconst
	OpPPC64MADDLD
	OpPPC64MULHD
	OpPPC64MULHW
@@ -1865,6 +1866,7 @@ const (
	OpPPC64SLWconst
	OpPPC64ROTLconst
	OpPPC64ROTLWconst
	OpPPC64EXTSWSLconst
	OpPPC64CNTLZD
	OpPPC64CNTLZW
	OpPPC64CNTTZD
@@ -2731,7 +2733,9 @@ const (
	OpClosureCall
	OpStaticCall
	OpInterCall
	OpClosureLECall
	OpStaticLECall
	OpInterLECall
	OpSignExt8to16
	OpSignExt8to32
	OpSignExt8to64
@@ -6057,18 +6061,6 @@ var opcodeTable = [...]opInfo{
		argLen: 0,
		reg: regInfo{},
	},
	{
		name: "FCHS",
		argLen: 1,
		reg: regInfo{
			inputs: []inputInfo{
				{0, 65280}, // X0 X1 X2 X3 X4 X5 X6 X7
			},
			outputs: []outputInfo{
				{0, 65280}, // X0 X1 X2 X3 X4 X5 X6 X7
			},
		},
	},
	{
		name: "MOVSSconst1",
		auxType: auxFloat32,
@@ -24387,6 +24379,34 @@ var opcodeTable = [...]opInfo{
			},
		},
	},
	{
		name: "MULLDconst",
		auxType: auxInt32,
		argLen: 1,
		asm: ppc64.AMULLD,
		reg: regInfo{
			inputs: []inputInfo{
				{0, 1073733630}, // SP SB R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
			outputs: []outputInfo{
				{0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
		},
	},
	{
		name: "MULLWconst",
		auxType: auxInt32,
		argLen: 1,
		asm: ppc64.AMULLW,
		reg: regInfo{
			inputs: []inputInfo{
				{0, 1073733630}, // SP SB R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
			outputs: []outputInfo{
				{0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
		},
	},
	{
		name: "MADDLD",
		argLen: 3,
@@ -24849,6 +24869,20 @@ var opcodeTable = [...]opInfo{
			},
		},
	},
	{
		name: "EXTSWSLconst",
		auxType: auxInt64,
		argLen: 1,
		asm: ppc64.AEXTSWSLI,
		reg: regInfo{
			inputs: []inputInfo{
				{0, 1073733630}, // SP SB R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
			outputs: []outputInfo{
				{0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
			},
		},
	},
	{
		name: "CNTLZD",
		argLen: 1,
@@ -34836,6 +34870,13 @@ var opcodeTable = [...]opInfo{
		call: true,
		generic: true,
	},
	{
		name: "ClosureLECall",
		auxType: auxCallOff,
		argLen: -1,
		call: true,
		generic: true,
	},
	{
		name: "StaticLECall",
		auxType: auxCallOff,

@@ -34843,6 +34884,13 @@ var opcodeTable = [...]opInfo{
		call: true,
		generic: true,
	},
	{
		name: "InterLECall",
		auxType: auxCallOff,
		argLen: -1,
		call: true,
		generic: true,
	},
	{
		name: "SignExt8to16",
		argLen: 1,
@@ -625,9 +625,6 @@ func (s *regAllocState) init(f *Func) {
			s.f.fe.Fatalf(src.NoXPos, "arch %s not implemented", s.f.Config.arch)
		}
	}
	if s.f.Config.use387 {
		s.allocatable &^= 1 << 15 // X7 disallowed (one 387 register is used as scratch space during SSE->387 generation in ../x86/387.go)
	}

	// Linear scan register allocation can be influenced by the order in which blocks appear.
	// Decouple the register allocation order from the generated block order.

@@ -1024,9 +1021,6 @@ func (s *regAllocState) regalloc(f *Func) {
			if phiRegs[i] != noRegister {
				continue
			}
			if s.f.Config.use387 && v.Type.IsFloat() {
				continue // 387 can't handle floats in registers between blocks
			}
			m := s.compatRegs(v.Type) &^ phiUsed &^ s.used
			if m != 0 {
				r := pickReg(m)

@@ -1528,11 +1522,6 @@ func (s *regAllocState) regalloc(f *Func) {
			s.freeUseRecords = u
		}

		// Spill any values that can't live across basic block boundaries.
		if s.f.Config.use387 {
			s.freeRegs(s.f.Config.fpRegMask)
		}

		// If we are approaching a merge point and we are the primary
		// predecessor of it, find live values that we use soon after
		// the merge point and promote them to registers now.

@@ -1562,9 +1551,6 @@ func (s *regAllocState) regalloc(f *Func) {
				continue
			}
			v := s.orig[vid]
			if s.f.Config.use387 && v.Type.IsFloat() {
				continue // 387 can't handle floats in registers between blocks
			}
			m := s.compatRegs(v.Type) &^ s.used
			if m&^desired.avoid != 0 {
				m &^= desired.avoid
@@ -395,7 +395,8 @@ func canMergeLoad(target, load *Value) bool {

// isSameCall reports whether sym is the same as the given named symbol
func isSameCall(sym interface{}, name string) bool {
	return sym.(*AuxCall).Fn.String() == name
	fn := sym.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}

// nlz returns the number of leading zeros.
@@ -764,6 +765,36 @@ func devirt(v *Value, aux interface{}, sym Sym, offset int64) *AuxCall {
	return StaticAuxCall(lsym, va.args, va.results)
}

// de-virtualize an InterLECall
// 'sym' is the symbol for the itab
func devirtLESym(v *Value, aux interface{}, sym Sym, offset int64) *obj.LSym {
	n, ok := sym.(*obj.LSym)
	if !ok {
		return nil
	}

	f := v.Block.Func
	lsym := f.fe.DerefItab(n, offset)
	if f.pass.debug > 0 {
		if lsym != nil {
			f.Warnl(v.Pos, "de-virtualizing call")
		} else {
			f.Warnl(v.Pos, "couldn't de-virtualize call")
		}
	}
	if lsym == nil {
		return nil
	}
	return lsym
}

func devirtLECall(v *Value, sym *obj.LSym) *Value {
	v.Op = OpStaticLECall
	v.Aux.(*AuxCall).Fn = sym
	v.RemoveArg(0)
	return v
}

// isSamePtr reports whether p1 and p2 point to the same address.
func isSamePtr(p1, p2 *Value) bool {
	if p1 == p2 {
@@ -1350,8 +1381,8 @@ func GetPPC64Shiftme(auxint int64) int64 {
	return int64(int8(auxint))
}

// Catch the simple ones first
// TODO: Later catch more cases
// This verifies that the mask occupies the
// rightmost bits.
func isPPC64ValidShiftMask(v int64) bool {
	if ((v + 1) & v) == 0 {
		return true
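A worked check of the mask test above (my example): for a contiguous right-justified mask v, v+1 carries past all the set bits, so (v+1)&v is zero; a mask with a hole fails.

	package main

	import "fmt"

	func validMask(v int64) bool { return (v+1)&v == 0 }

	func main() {
		fmt.Println(validMask(0x00FF)) // true: bits 0..7 set, no holes
		fmt.Println(validMask(0x00F9)) // false: hole at bits 1 and 2
	}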
@@ -1310,10 +1310,8 @@ func rewriteValue386_Op386ADDLmodify(v *Value) bool {
func rewriteValue386_Op386ADDSD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (ADDSD x l:(MOVSDload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (ADDSDload x [off] {sym} ptr mem)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {

@@ -1326,7 +1324,7 @@ func rewriteValue386_Op386ADDSD(v *Value) bool {
			sym := auxToSym(l.Aux)
			mem := l.Args[1]
			ptr := l.Args[0]
			if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
			if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
				continue
			}
			v.reset(Op386ADDSDload)

@@ -1395,10 +1393,8 @@ func rewriteValue386_Op386ADDSDload(v *Value) bool {
func rewriteValue386_Op386ADDSS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (ADDSS x l:(MOVSSload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (ADDSSload x [off] {sym} ptr mem)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {

@@ -1411,7 +1407,7 @@ func rewriteValue386_Op386ADDSS(v *Value) bool {
			sym := auxToSym(l.Aux)
			mem := l.Args[1]
			ptr := l.Args[0]
			if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
			if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
				continue
			}
			v.reset(Op386ADDSSload)

@@ -2640,10 +2636,8 @@ func rewriteValue386_Op386CMPWload(v *Value) bool {
func rewriteValue386_Op386DIVSD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (DIVSD x l:(MOVSDload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (DIVSDload x [off] {sym} ptr mem)
	for {
		x := v_0

@@ -2655,7 +2649,7 @@ func rewriteValue386_Op386DIVSD(v *Value) bool {
		sym := auxToSym(l.Aux)
		mem := l.Args[1]
		ptr := l.Args[0]
		if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
		if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
			break
		}
		v.reset(Op386DIVSDload)

@@ -2722,10 +2716,8 @@ func rewriteValue386_Op386DIVSDload(v *Value) bool {
func rewriteValue386_Op386DIVSS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (DIVSS x l:(MOVSSload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (DIVSSload x [off] {sym} ptr mem)
	for {
		x := v_0

@@ -2737,7 +2729,7 @@ func rewriteValue386_Op386DIVSS(v *Value) bool {
		sym := auxToSym(l.Aux)
		mem := l.Args[1]
		ptr := l.Args[0]
		if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
		if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
			break
		}
		v.reset(Op386DIVSSload)

@@ -6104,10 +6096,8 @@ func rewriteValue386_Op386MULLload(v *Value) bool {
func rewriteValue386_Op386MULSD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MULSD x l:(MOVSDload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (MULSDload x [off] {sym} ptr mem)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {

@@ -6120,7 +6110,7 @@ func rewriteValue386_Op386MULSD(v *Value) bool {
			sym := auxToSym(l.Aux)
			mem := l.Args[1]
			ptr := l.Args[0]
			if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
			if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
				continue
			}
			v.reset(Op386MULSDload)

@@ -6189,10 +6179,8 @@ func rewriteValue386_Op386MULSDload(v *Value) bool {
func rewriteValue386_Op386MULSS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MULSS x l:(MOVSSload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (MULSSload x [off] {sym} ptr mem)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {

@@ -6205,7 +6193,7 @@ func rewriteValue386_Op386MULSS(v *Value) bool {
			sym := auxToSym(l.Aux)
			mem := l.Args[1]
			ptr := l.Args[0]
			if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
			if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
				continue
			}
			v.reset(Op386MULSSload)

@@ -8187,10 +8175,8 @@ func rewriteValue386_Op386SUBLmodify(v *Value) bool {
func rewriteValue386_Op386SUBSD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SUBSD x l:(MOVSDload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (SUBSDload x [off] {sym} ptr mem)
	for {
		x := v_0

@@ -8202,7 +8188,7 @@ func rewriteValue386_Op386SUBSD(v *Value) bool {
		sym := auxToSym(l.Aux)
		mem := l.Args[1]
		ptr := l.Args[0]
		if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
		if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
			break
		}
		v.reset(Op386SUBSDload)

@@ -8269,10 +8255,8 @@ func rewriteValue386_Op386SUBSDload(v *Value) bool {
func rewriteValue386_Op386SUBSS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SUBSS x l:(MOVSSload [off] {sym} ptr mem))
	// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
	// cond: canMergeLoadClobber(v, l, x) && clobber(l)
	// result: (SUBSSload x [off] {sym} ptr mem)
	for {
		x := v_0

@@ -8284,7 +8268,7 @@ func rewriteValue386_Op386SUBSS(v *Value) bool {
		sym := auxToSym(l.Aux)
		mem := l.Args[1]
		ptr := l.Args[0]
		if !(canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)) {
		if !(canMergeLoadClobber(v, l, x) && clobber(l)) {
			break
		}
		v.reset(Op386SUBSSload)

@@ -10043,68 +10027,32 @@ func rewriteValue386_OpMove(v *Value) bool {
func rewriteValue386_OpNeg32F(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Neg32F x)
	// cond: !config.use387
	// result: (PXOR x (MOVSSconst <typ.Float32> [float32(math.Copysign(0, -1))]))
	for {
		x := v_0
		if !(!config.use387) {
			break
		}
		v.reset(Op386PXOR)
		v0 := b.NewValue0(v.Pos, Op386MOVSSconst, typ.Float32)
		v0.AuxInt = float32ToAuxInt(float32(math.Copysign(0, -1)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Neg32F x)
	// cond: config.use387
	// result: (FCHS x)
	for {
		x := v_0
		if !(config.use387) {
			break
		}
		v.reset(Op386FCHS)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValue386_OpNeg64F(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Neg64F x)
	// cond: !config.use387
	// result: (PXOR x (MOVSDconst <typ.Float64> [math.Copysign(0, -1)]))
	for {
		x := v_0
		if !(!config.use387) {
			break
		}
		v.reset(Op386PXOR)
		v0 := b.NewValue0(v.Pos, Op386MOVSDconst, typ.Float64)
		v0.AuxInt = float64ToAuxInt(math.Copysign(0, -1))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Neg64F x)
	// cond: config.use387
	// result: (FCHS x)
	for {
		x := v_0
		if !(config.use387) {
			break
		}
		v.reset(Op386FCHS)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValue386_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
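Background on the PXOR rewrite retained above (my example, not from the CL): negating a float is a sign-bit XOR, and math.Copysign(0, -1) is negative zero, whose only set bit is the sign bit.

	package main

	import (
		"fmt"
		"math"
	)

	func main() {
		negZero := math.Copysign(0, -1)   // -0.0: only the sign bit set
		bits := math.Float64bits(negZero) // 0x8000000000000000
		x := 3.5
		// XOR with the sign bit flips the sign, which is what PXOR does.
		fmt.Println(math.Float64frombits(math.Float64bits(x) ^ bits)) // -3.5
	}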
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -568,6 +568,10 @@ func rewriteValuePPC64(v *Value) bool {
		return rewriteValuePPC64_OpPPC64MOVWstorezero(v)
	case OpPPC64MTVSRD:
		return rewriteValuePPC64_OpPPC64MTVSRD(v)
	case OpPPC64MULLD:
		return rewriteValuePPC64_OpPPC64MULLD(v)
	case OpPPC64MULLW:
		return rewriteValuePPC64_OpPPC64MULLW(v)
	case OpPPC64NEG:
		return rewriteValuePPC64_OpPPC64NEG(v)
	case OpPPC64NOR:
@@ -11003,6 +11007,56 @@ func rewriteValuePPC64_OpPPC64MTVSRD(v *Value) bool {
	}
	return false
}
func rewriteValuePPC64_OpPPC64MULLD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MULLD x (MOVDconst [c]))
	// cond: is16Bit(c)
	// result: (MULLDconst [int32(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpPPC64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is16Bit(c)) {
				continue
			}
			v.reset(OpPPC64MULLDconst)
			v.AuxInt = int32ToAuxInt(int32(c))
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
func rewriteValuePPC64_OpPPC64MULLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MULLW x (MOVDconst [c]))
	// cond: is16Bit(c)
	// result: (MULLWconst [int32(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpPPC64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is16Bit(c)) {
				continue
			}
			v.reset(OpPPC64MULLWconst)
			v.AuxInt = int32ToAuxInt(int32(c))
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
func rewriteValuePPC64_OpPPC64NEG(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEG (ADDconst [c] x))
@@ -12831,7 +12885,7 @@ func rewriteValuePPC64_OpPPC64SLDconst(v *Value) bool {
		return true
	}
	// match: (SLDconst [c] z:(ANDconst [d] x))
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d)
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d))
	// result: (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
	for {
		c := auxIntToInt64(v.AuxInt)

@@ -12841,7 +12895,7 @@ func rewriteValuePPC64_OpPPC64SLDconst(v *Value) bool {
		}
		d := auxIntToInt64(z.AuxInt)
		x := z.Args[0]
		if !(z.Uses == 1 && isPPC64ValidShiftMask(d)) {
		if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d))) {
			break
		}
		v.reset(OpPPC64CLRLSLDI)

@@ -12850,7 +12904,7 @@ func rewriteValuePPC64_OpPPC64SLDconst(v *Value) bool {
		return true
	}
	// match: (SLDconst [c] z:(AND (MOVDconst [d]) x))
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d)
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(64-getPPC64ShiftMaskLength(d))
	// result: (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
	for {
		c := auxIntToInt64(v.AuxInt)

@@ -12867,7 +12921,7 @@ func rewriteValuePPC64_OpPPC64SLDconst(v *Value) bool {
			}
			d := auxIntToInt64(z_0.AuxInt)
			x := z_1
			if !(z.Uses == 1 && isPPC64ValidShiftMask(d)) {
			if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d))) {
				continue
			}
			v.reset(OpPPC64CLRLSLDI)

@@ -12877,6 +12931,24 @@ func rewriteValuePPC64_OpPPC64SLDconst(v *Value) bool {
		}
		break
	}
	// match: (SLDconst [c] z:(MOVWreg x))
	// cond: c < 32 && objabi.GOPPC64 >= 9
	// result: (EXTSWSLconst [c] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		z := v_0
		if z.Op != OpPPC64MOVWreg {
			break
		}
		x := z.Args[0]
		if !(c < 32 && objabi.GOPPC64 >= 9) {
			break
		}
		v.reset(OpPPC64EXTSWSLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuePPC64_OpPPC64SLW(v *Value) bool {
@@ -12935,26 +13007,8 @@ func rewriteValuePPC64_OpPPC64SLWconst(v *Value) bool {
		v.AddArg(x)
		return true
	}
	// match: (SLWconst [c] z:(MOVWZreg x))
	// cond: z.Uses == 1 && c < 24
	// result: (CLRLSLWI [newPPC64ShiftAuxInt(c,8,31,32)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		z := v_0
		if z.Op != OpPPC64MOVWZreg {
			break
		}
		x := z.Args[0]
		if !(z.Uses == 1 && c < 24) {
			break
		}
		v.reset(OpPPC64CLRLSLWI)
		v.AuxInt = int32ToAuxInt(newPPC64ShiftAuxInt(c, 8, 31, 32))
		v.AddArg(x)
		return true
	}
	// match: (SLWconst [c] z:(ANDconst [d] x))
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d)
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d))
	// result: (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
	for {
		c := auxIntToInt64(v.AuxInt)

@@ -12964,7 +13018,7 @@ func rewriteValuePPC64_OpPPC64SLWconst(v *Value) bool {
		}
		d := auxIntToInt64(z.AuxInt)
		x := z.Args[0]
		if !(z.Uses == 1 && isPPC64ValidShiftMask(d)) {
		if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (32-getPPC64ShiftMaskLength(d))) {
			break
		}
		v.reset(OpPPC64CLRLSLWI)

@@ -12973,7 +13027,7 @@ func rewriteValuePPC64_OpPPC64SLWconst(v *Value) bool {
		return true
	}
	// match: (SLWconst [c] z:(AND (MOVDconst [d]) x))
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d)
	// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d))
	// result: (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
	for {
		c := auxIntToInt64(v.AuxInt)

@@ -12990,7 +13044,7 @@ func rewriteValuePPC64_OpPPC64SLWconst(v *Value) bool {
			}
			d := auxIntToInt64(z_0.AuxInt)
			x := z_1
			if !(z.Uses == 1 && isPPC64ValidShiftMask(d)) {
			if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (32-getPPC64ShiftMaskLength(d))) {
				continue
			}
			v.reset(OpPPC64CLRLSLWI)

@@ -13000,6 +13054,24 @@ func rewriteValuePPC64_OpPPC64SLWconst(v *Value) bool {
		}
		break
	}
	// match: (SLWconst [c] z:(MOVWreg x))
	// cond: c < 32 && objabi.GOPPC64 >= 9
	// result: (EXTSWSLconst [c] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		z := v_0
		if z.Op != OpPPC64MOVWreg {
			break
		}
		x := z.Args[0]
		if !(c < 32 && objabi.GOPPC64 >= 9) {
			break
		}
		v.reset(OpPPC64EXTSWSLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuePPC64_OpPPC64SRAD(v *Value) bool {
@@ -124,6 +124,8 @@ func rewriteValuegeneric(v *Value) bool {
		return rewriteValuegeneric_OpIMake(v)
	case OpInterCall:
		return rewriteValuegeneric_OpInterCall(v)
	case OpInterLECall:
		return rewriteValuegeneric_OpInterLECall(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds(v)
	case OpIsNonNil:

@@ -366,6 +368,8 @@ func rewriteValuegeneric(v *Value) bool {
		return rewriteValuegeneric_OpSelect0(v)
	case OpSelect1:
		return rewriteValuegeneric_OpSelect1(v)
	case OpSelectN:
		return rewriteValuegeneric_OpSelectN(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32(v)
	case OpSignExt16to64:
@@ -8522,6 +8526,46 @@ func rewriteValuegeneric_OpInterCall(v *Value) bool {
	}
	return false
}
func rewriteValuegeneric_OpInterLECall(v *Value) bool {
	// match: (InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___)
	// cond: devirtLESym(v, auxCall, itab, off) != nil
	// result: devirtLECall(v, devirtLESym(v, auxCall, itab, off))
	for {
		if len(v.Args) < 1 {
			break
		}
		auxCall := auxToCall(v.Aux)
		v_0 := v.Args[0]
		if v_0.Op != OpLoad {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0_0.AuxInt)
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpITab {
			break
		}
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpIMake {
			break
		}
		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
		if v_0_0_0_0_0.Op != OpAddr {
			break
		}
		itab := auxToSym(v_0_0_0_0_0.Aux)
		v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
		if v_0_0_0_0_0_0.Op != OpSB || !(devirtLESym(v, auxCall, itab, off) != nil) {
			break
		}
		v.copyOf(devirtLECall(v, devirtLESym(v, auxCall, itab, off)))
		return true
	}
	return false
}
func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
@@ -16082,6 +16126,38 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
		v.reset(OpInvalid)
		return true
	}
	// match: (NilCheck (SelectN [0] call:(StaticLECall _ _)) (SelectN [1] call))
	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
	// result: (Invalid)
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		call := v_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 || v_1.Op != OpSelectN || auxIntToInt64(v_1.AuxInt) != 1 || call != v_1.Args[0] || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
			break
		}
		v.reset(OpInvalid)
		return true
	}
	// match: (NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) (SelectN [1] call))
	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
	// result: (Invalid)
	for {
		if v_0.Op != OpOffPtr {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
			break
		}
		call := v_0_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 || v_1.Op != OpSelectN || auxIntToInt64(v_1.AuxInt) != 1 || call != v_1.Args[0] || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
			break
		}
		v.reset(OpInvalid)
		return true
	}
	return false
}
func rewriteValuegeneric_OpNot(v *Value) bool {
@@ -18549,6 +18625,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
	// match: (Phi (Const8 [c]) (Const8 [c]))
	// result: (Const8 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {

@@ -18556,7 +18635,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
		}
		c := auxIntToInt8(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c || len(v.Args) != 2 {
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst8)

@@ -18566,6 +18645,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
	// match: (Phi (Const16 [c]) (Const16 [c]))
	// result: (Const16 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {

@@ -18573,7 +18655,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
		}
		c := auxIntToInt16(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c || len(v.Args) != 2 {
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst16)

@@ -18583,6 +18665,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
	// match: (Phi (Const32 [c]) (Const32 [c]))
	// result: (Const32 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {

@@ -18590,7 +18675,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
		}
		c := auxIntToInt32(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c || len(v.Args) != 2 {
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst32)

@@ -18600,6 +18685,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
	// match: (Phi (Const64 [c]) (Const64 [c]))
	// result: (Const64 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {

@@ -18607,7 +18695,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || len(v.Args) != 2 {
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst64)
@@ -20615,6 +20703,70 @@ func rewriteValuegeneric_OpSelect1(v *Value) bool {
	}
	return false
}
func rewriteValuegeneric_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst64 {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(dst.Type.Elem())
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst32 {
			break
		}
		sz := auxIntToInt32(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(dst.Type.Elem())
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt16to32 (Const16 [c]))
@@ -21660,6 +21812,48 @@ func rewriteValuegeneric_OpStore(v *Value) bool {
		v.copyOf(mem)
		return true
	}
	// match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
	// result: mem
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		call := v_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 {
			break
		}
		x := v_1
		mem := v_2
		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
			break
		}
		v.copyOf(mem)
		return true
	}
	// match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
	// result: mem
	for {
		if v_0.Op != OpOffPtr {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
			break
		}
		call := v_0_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 {
			break
		}
		x := v_1
		mem := v_2
		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
			break
		}
		v.copyOf(mem)
		return true
	}
	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
@@ -24357,6 +24551,24 @@ func rewriteValuegeneric_OpZero(v *Value) bool {
		v.copyOf(mem)
		return true
	}
	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
	// cond: isSameCall(call.Aux, "runtime.newobject")
	// result: mem
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		call := v_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 {
			break
		}
		mem := v_1
		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
			break
		}
		v.copyOf(mem)
		return true
	}
	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
	// result: (Zero {t1} [n] p1 mem)
@@ -348,6 +348,9 @@ func (v *Value) reset(op Op) {
// It modifies v to be (Copy a).
//go:noinline
func (v *Value) copyOf(a *Value) {
	if v == a {
		return
	}
	if v.InCache {
		v.Block.Func.unCache(v)
	}
@@ -527,7 +527,7 @@ func IsStackAddr(v *Value) bool {
		v = v.Args[0]
	}
	switch v.Op {
	case OpSP, OpLocalAddr:
	case OpSP, OpLocalAddr, OpSelectNAddr:
		return true
	}
	return false

@@ -593,7 +593,7 @@ func IsSanitizerSafeAddr(v *Value) bool {
		v = v.Args[0]
	}
	switch v.Op {
	case OpSP, OpLocalAddr:
	case OpSP, OpLocalAddr, OpSelectNAddr:
		// Stack addresses are always safe.
		return true
	case OpITab, OpStringPtr, OpGetClosurePtr:

@@ -609,7 +609,7 @@ func IsSanitizerSafeAddr(v *Value) bool {
// isVolatile reports whether v is a pointer to argument region on stack which
// will be clobbered by a function call.
func isVolatile(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy || v.Op == OpSelectNAddr {
		v = v.Args[0]
	}
	return v.Op == OpSP
Some files were not shown because too many files have changed in this diff