mirror of https://github.com/golang/go.git (synced 2025-12-08 06:10:04 +00:00)

commit 094f1d5990
parent 8231548365

more lgtm files from gofmt

R=gri OCL=35485 CL=35488

78 changed files with 1517 additions and 1441 deletions
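This CL is mechanical gofmt output over files that were already reviewed ("lgtm"): the pre-Go 1 statement semicolons are normalized, one-line function and interface bodies are expanded, composite literals are tightened, and operator/slice spacing follows the printer's precedence heuristics. For orientation only, the sketch below rewrites the dirList sort helpers from the godoc hunk in modern Go syntax (semicolons dropped, os.Dir replaced by a hypothetical dirEntry struct) so it compiles today; it is an illustration, not part of the commit.

package main

import (
	"fmt"
	"sort"
)

// dirEntry is a hypothetical stand-in for the *os.Dir values godoc sorted in 2009.
type dirEntry struct {
	Name string
}

// dirList mirrors the type reformatted in the godoc hunk below; the method
// bodies are written in the expanded form the 2009 gofmt produced.
type dirList []*dirEntry

func (d dirList) Len() int {
	return len(d)
}

func (d dirList) Less(i, j int) bool {
	return d[i].Name < d[j].Name
}

func (d dirList) Swap(i, j int) {
	d[i], d[j] = d[j], d[i]
}

func main() {
	// dirList satisfies sort.Interface through the three methods above.
	d := dirList{{Name: "zlib"}, {Name: "bufio"}, {Name: "bytes"}}
	sort.Sort(d)
	for _, e := range d {
		fmt.Println(e.Name) // bufio, bytes, zlib
	}
}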
@@ -6,13 +6,13 @@
 // Web server tree:
 //
 // http://godoc/ main landing page
 // http://godoc/doc/ serve from $GOROOT/doc - spec, mem, tutorial, etc.
 // http://godoc/src/ serve files from $GOROOT/src; .go gets pretty-printed
 // http://godoc/cmd/ serve documentation about commands (TODO)
 // http://godoc/pkg/ serve documentation about packages
 // (idea is if you say import "compress/zlib", you go to
 // http://godoc/pkg/compress/zlib)
 //
 // Command-line interface:
 //
@@ -27,35 +27,35 @@
 package main

 import (
 "bytes";
 "container/vector";
 "flag";
 "fmt";
 "go/ast";
 "go/doc";
 "go/parser";
 "go/printer";
 "go/scanner";
 "go/token";
 "http";
 "io";
 "log";
 "os";
 pathutil "path";
 "sort";
 "strings";
 "sync";
 "template";
 "time";
 )

-const Pkg = "/pkg/"; // name for auto-generated package documentation tree
+const Pkg = "/pkg/" // name for auto-generated package documentation tree

 type delayTime struct {
 mutex sync.RWMutex;
 minutes int;
 }
@@ -70,7 +70,7 @@ func (dt *delayTime) backoff(max int) {
 dt.mutex.Lock();
 dt.minutes *= 2;
 if dt.minutes > max {
-dt.minutes = max
+dt.minutes = max;
 }
 dt.mutex.Unlock();
 }
@@ -84,8 +84,8 @@ func (dt *delayTime) get() int {

 type timeStamp struct {
 mutex sync.RWMutex;
 seconds int64;
 }

@@ -104,25 +104,25 @@ func (ts *timeStamp) get() int64 {

 var (
 verbose = flag.Bool("v", false, "verbose mode");

 // file system roots
 goroot string;
 pkgroot = flag.String("pkgroot", "src/pkg", "root package source directory (if unrooted, relative to goroot)");
 tmplroot = flag.String("tmplroot", "lib/godoc", "root template directory (if unrooted, relative to goroot)");

 // periodic sync
 syncCmd = flag.String("sync", "", "sync command; disabled if empty");
 syncMin = flag.Int("sync_minutes", 0, "sync interval in minutes; disabled if <= 0");
 syncDelay delayTime; // actual sync delay in minutes; usually syncDelay == syncMin, but delay may back off exponentially
 syncTime timeStamp; // time of last p4 sync

 // layout control
 tabwidth = flag.Int("tabwidth", 4, "tab width");
 html = flag.Bool("html", false, "print HTML in command-line mode");

 // server control
 httpaddr = flag.String("http", "", "HTTP service address (e.g., ':6060')");
 )

@@ -132,7 +132,7 @@ func init() {
 goroot = "/home/r/go-release/go";
 }
 flag.StringVar(&goroot, "goroot", goroot, "Go root directory");
 syncTime.set(); // have a reasonable initial value
 }
@@ -140,11 +140,10 @@ func init() {
 // Support

 func isGoFile(dir *os.Dir) bool {
-return
-dir.IsRegular() &&
-!strings.HasPrefix(dir.Name, ".") && // ignore .files
+return dir.IsRegular() &&
+!strings.HasPrefix(dir.Name, ".") && // ignore .files
 pathutil.Ext(dir.Name) == ".go" &&
 !strings.HasSuffix(dir.Name, "_test.go"); // ignore test files
 }

@@ -158,9 +157,9 @@ func isPkgDir(dir *os.Dir) bool {
 // A single error in the parsed file.
 type parseError struct {
 src []byte; // source before error
 line int; // line number of error
 msg string; // error message
 }

@@ -171,9 +170,9 @@ type parseError struct {
 // This data structure is handed to the templates parseerror.txt and parseerror.html.
 //
 type parseErrors struct {
 filename string; // path to file
 list []parseError; // the errors
 src []byte; // the file's entire source code
 }

@@ -196,7 +195,7 @@ func parse(path string, mode uint) (*ast.File, *parseErrors) {
 // TODO(gri) If the file contains //line comments, the errors
 // may not be sorted in increasing file offset value
 // which will lead to incorrect output.
-errs = make([]parseError, len(errors) + 1); // +1 for final fragment of source
+errs = make([]parseError, len(errors)+1); // +1 for final fragment of source
 offs := 0;
 for i, r := range errors {
 // Should always be true, but check for robustness.
@@ -207,7 +206,7 @@ func parse(path string, mode uint) (*ast.File, *parseErrors) {
 errs[i].line = r.Pos.Line;
 errs[i].msg = r.Msg;
 }
-errs[len(errors)].src = src[offs : len(src)];
+errs[len(errors)].src = src[offs:len(src)];
 } else {
 // single error of unspecified type
 errs = make([]parseError, 2);
@@ -289,7 +288,9 @@ func textFmt(w io.Writer, x interface{}, format string) {

 // Template formatter for "link" format.
 func linkFmt(w io.Writer, x interface{}, format string) {
-type Positioner interface { Pos() token.Position }
+type Positioner interface {
+Pos() token.Position;
+}
 if node, ok := x.(Positioner); ok {
 pos := node.Pos();
 if pos.IsValid() {
@@ -326,8 +327,8 @@ func readTemplate(name string) *template.Template {
 var godocHtml *template.Template
 var packageHtml *template.Template
 var packageText *template.Template
-var parseerrorHtml *template.Template;
-var parseerrorText *template.Template;
+var parseerrorHtml *template.Template
+var parseerrorText *template.Template

 func readTemplates() {
 // have to delay until after flags processing,
@@ -345,9 +346,9 @@ func readTemplates() {

 func servePage(c *http.Conn, title, content interface{}) {
 type Data struct {
 title interface{};
 timestamp string;
 content interface{};
 }

 d := Data{
@@ -372,8 +373,8 @@ func serveText(c *http.Conn, text []byte) {
 // Files

 var (
 tagBegin = strings.Bytes("<!--");
 tagEnd = strings.Bytes("-->");
 )

 // commentText returns the text of the first HTML comment in src.
@@ -436,7 +437,7 @@ func serveGoSource(c *http.Conn, filename string) {
 }

-var fileServer = http.FileServer(".", "");
+var fileServer = http.FileServer(".", "")

 func serveFile(c *http.Conn, r *http.Request) {
 path := r.Url.Path;
@@ -471,9 +472,15 @@ func serveFile(c *http.Conn, r *http.Request) {
 // TODO if we don't plan to use the directory information, simplify to []string
 type dirList []*os.Dir

-func (d dirList) Len() int { return len(d) }
-func (d dirList) Less(i, j int) bool { return d[i].Name < d[j].Name }
-func (d dirList) Swap(i, j int) { d[i], d[j] = d[j], d[i] }
+func (d dirList) Len() int {
+return len(d);
+}
+func (d dirList) Less(i, j int) bool {
+return d[i].Name < d[j].Name;
+}
+func (d dirList) Swap(i, j int) {
+d[i], d[j] = d[j], d[i];
+}

 func pkgName(filename string) string {
@@ -486,8 +493,8 @@ func pkgName(filename string) string {

 type PageInfo struct {
 PDoc *doc.PackageDoc; // nil if no package found
 Dirs dirList; // nil if no subdirectories found
 }

@@ -542,7 +549,7 @@ func getPageInfo(path string) PageInfo {
 var pdoc *doc.PackageDoc;
 if pkg != nil {
 ast.PackageExports(pkg);
 pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(path)); // no trailing '/' in importpath
 }

 return PageInfo{pdoc, subdirs};
@@ -551,10 +558,10 @@ func getPageInfo(path string) PageInfo {

 func servePkg(c *http.Conn, r *http.Request) {
 path := r.Url.Path;
-path = path[len(Pkg) : len(path)];
+path = path[len(Pkg):len(path)];

 // canonicalize URL path and redirect if necessary
-if canonical := pathutil.Clean(Pkg + path) + "/"; r.Url.Path != canonical {
+if canonical := pathutil.Clean(Pkg+path) + "/"; r.Url.Path != canonical {
 http.Redirect(c, canonical, http.StatusMovedPermanently);
 return;
 }
@@ -575,7 +582,7 @@ func servePkg(c *http.Conn, r *http.Request) {
 }

 if path == "" {
 path = "."; // don't display an empty path
 }
 title := "Directory " + path;
 if info.PDoc != nil {
@@ -593,7 +600,7 @@ func loggingHandler(h http.Handler) http.Handler {
 return http.HandlerFunc(func(c *http.Conn, req *http.Request) {
 log.Stderrf("%s\t%s", c.RemoteAddr, req.Url);
 h.ServeHTTP(c, req);
-})
+});
 }
@@ -648,7 +655,7 @@ func dosync(c *http.Conn, r *http.Request) {
 if exec(c, args) {
 // sync succeeded
 syncTime.set();
 syncDelay.set(*syncMin); // revert to regular sync schedule
 } else {
 // sync failed - back off exponentially, but try at least once a day
 syncDelay.backoff(24*60);
@@ -659,8 +666,7 @@ func dosync(c *http.Conn, r *http.Request) {
 func usage() {
 fmt.Fprintf(os.Stderr,
 "usage: godoc package [name ...]\n"
-" godoc -http=:6060\n"
-);
+" godoc -http=:6060\n");
 flag.PrintDefaults();
 os.Exit(2);
 }
@@ -712,20 +718,20 @@ func main() {

 // Start sync goroutine, if enabled.
 if *syncCmd != "" && *syncMin > 0 {
 syncDelay.set(*syncMin); // initial sync delay
 go func() {
 for {
 dosync(nil, nil);
 if *verbose {
 log.Stderrf("next sync in %dmin", syncDelay.get());
 }
-time.Sleep(int64(syncDelay.get()) * (60 * 1e9));
+time.Sleep(int64(syncDelay.get())*(60*1e9));
 }
 }();
 }

 if err := http.ListenAndServe(*httpaddr, handler); err != nil {
-log.Exitf("ListenAndServe %s: %v", *httpaddr, err)
+log.Exitf("ListenAndServe %s: %v", *httpaddr, err);
 }
 return;
 }
@@ -739,7 +745,7 @@ func main() {

 if info.PDoc != nil && flag.NArg() > 1 {
 args := flag.Args();
-info.PDoc.Filter(args[1 : len(args)]);
+info.PDoc.Filter(args[1:len(args)]);
 }

 if err := packageText.Execute(info, os.Stdout); err != nil {
@@ -21,13 +21,13 @@ import (

 type ebnfParser struct {
 out io.Writer; // parser output
 src []byte; // parser source
 scanner scanner.Scanner;
 prev int; // offset of previous token
 pos token.Position; // token position
 tok token.Token; // one token look-ahead
 lit []byte; // token literal
 }

@@ -48,7 +48,7 @@ func (p *ebnfParser) next() {
 }

-func (p *ebnfParser) Error (pos token.Position, msg string) {
+func (p *ebnfParser) Error(pos token.Position, msg string) {
 fmt.Fprintf(p.out, "<font color=red>error: %s</font>", msg);
 }

@@ -60,7 +60,7 @@ func (p *ebnfParser) errorExpected(pos token.Position, msg string) {
 // make the error message more specific
 msg += ", found '" + p.tok.String() + "'";
 if p.tok.IsLiteral() {
-msg += " " + string(p.lit);
+msg += " "+string(p.lit);
 }
 }
 p.Error(pos, msg);
@@ -72,7 +72,7 @@ func (p *ebnfParser) expect(tok token.Token) token.Position {
 if p.tok != tok {
 p.errorExpected(pos, "'" + tok.String() + "'");
 }
 p.next(); // make progress in any case
 return pos;
 }

@@ -85,7 +85,7 @@ func (p *ebnfParser) parseIdentifier(def bool) {
 } else {
 fmt.Fprintf(p.out, `<a href="#%s" style="text-decoration: none;">%s</a>`, name, name);
 }
 p.prev += len(name); // skip identifier when calling flush
 }

@@ -125,8 +125,7 @@ func (p *ebnfParser) parseTerm() bool {

 func (p *ebnfParser) parseSequence() {
-for p.parseTerm() {
-}
+for p.parseTerm() {}
 }

@@ -154,7 +153,7 @@ func (p *ebnfParser) parse(out io.Writer, src []byte) {
 p.out = out;
 p.src = src;
 p.scanner.Init("", src, p, 0);
 p.next(); // initializes pos, tok, lit

 // process source
 for p.tok != token.EOF {
@@ -166,8 +165,8 @@ func (p *ebnfParser) parse(out io.Writer, src []byte) {

 // Markers around EBNF sections
 var (
 open = strings.Bytes(`<pre class="ebnf">`);
 close = strings.Bytes(`</pre>`);
 )

@@ -183,19 +182,19 @@ func linkify(out io.Writer, src []byte) {
 i += len(open);

 // j: end of EBNF text (or end of source)
-j := bytes.Index(src[i : n], close); // close marker
+j := bytes.Index(src[i:n], close); // close marker
 if j < 0 {
 j = n-i;
 }
 j += i;

 // write text before EBNF
-out.Write(src[0 : i]);
+out.Write(src[0:i]);
 // parse and write EBNF
 var p ebnfParser;
-p.parse(out, src[i : j]);
+p.parse(out, src[i:j]);

 // advance
-src = src[j : n];
+src = src[j:n];
 }
 }
@@ -121,11 +121,11 @@ func (tr *Reader) readHeader() *Header {
 }

 // Two blocks of zero bytes marks the end of the archive.
-if bytes.Equal(header, zeroBlock[0:blockSize]) {
+if bytes.Equal(header, zeroBlock[0 : blockSize]) {
 if _, tr.err = io.ReadFull(tr.r, header); tr.err != nil {
 return nil;
 }
-if !bytes.Equal(header, zeroBlock[0:blockSize]) {
+if !bytes.Equal(header, zeroBlock[0 : blockSize]) {
 tr.err = HeaderError;
 }
 return nil;
@@ -198,7 +198,7 @@ func (tr *Reader) readHeader() *Header {
 // Maximum value of hdr.Size is 64 GB (12 octal digits),
 // so there's no risk of int64 overflowing.
 tr.nb = int64(hdr.Size);
-tr.pad = -tr.nb & (blockSize-1); // blockSize is a power of two
+tr.pad = -tr.nb & (blockSize - 1); // blockSize is a power of two

 return hdr;
 }
@@ -120,7 +120,7 @@ func (tw *Writer) WriteHeader(hdr *Header) os.Error {
 }

 tw.nb = int64(hdr.Size);
-tw.pad = -tw.nb & (blockSize-1); // blockSize is a power of two
+tw.pad = -tw.nb & (blockSize - 1); // blockSize is a power of two

 header := make([]byte, blockSize);
 s := slicer(header);
@@ -143,7 +143,7 @@ func (e *encoder) Write(p []byte) (n int, err os.Error) {

 // Large interior chunks.
 for len(p) > 3 {
-nn := len(e.out) / 4 * 3;
+nn := len(e.out)/4*3;
 if nn > len(p) {
 nn = len(p);
 }
@@ -286,7 +286,7 @@ func (d *decoder) Read(p []byte) (n int, err os.Error) {
 // Use leftover decoded output from last read.
 if len(d.out) > 0 {
 n = bytes.Copy(p, d.out);
-d.out = d.out[n : len(d.out)];
+d.out = d.out[n:len(d.out)];
 return n, nil;
 }

@@ -311,7 +311,7 @@ func (d *decoder) Read(p []byte) (n int, err os.Error) {
 nw, d.end, d.err = d.enc.decode(d.buf[0:nr], &d.outbuf);
 d.out = d.outbuf[0:nw];
 n = bytes.Copy(p, d.out);
-d.out = d.out[n : len(d.out)];
+d.out = d.out[n:len(d.out)];
 } else {
 n, d.end, d.err = d.enc.decode(d.buf[0:nr], p);
 }
@@ -104,7 +104,7 @@ func TestDecode(t *testing.T) {
 testEqual(t, "Decode(%q) = error %v, want %v", p.encoded, err, os.Error(nil));
 testEqual(t, "Decode(%q) = length %v, want %v", p.encoded, count, len(p.decoded));
 if len(p.encoded) > 0 {
-testEqual(t, "Decode(%q) = end %v, want %v", p.encoded, end, (p.encoded[len(p.encoded) - 1] == '='));
+testEqual(t, "Decode(%q) = end %v, want %v", p.encoded, end, (p.encoded[len(p.encoded)-1] == '='));
 }
 testEqual(t, "Decode(%q) = %q, want %q", p.encoded, string(dbuf[0:count]), p.decoded);
 }
@@ -130,7 +130,7 @@ func TestDecoder(t *testing.T) {
 func TestDecoderBuffering(t *testing.T) {
 for bs := 1; bs <= 12; bs++ {
 decoder := NewDecoder(StdEncoding, bytes.NewBufferString(bigtest.encoded));
-buf := make([]byte, len(bigtest.decoded) + 12);
+buf := make([]byte, len(bigtest.decoded)+12);
 var total int;
 for total = 0; total < len(bigtest.decoded); {
 n, err := decoder.Read(buf[total : total+bs]);
@@ -18,7 +18,7 @@ func Mul128(x, y uint64) (z1, z0 uint64) {
 // and return the product as 2 words.

 const (
-W = uint(unsafe.Sizeof(x)) * 8;
+W = uint(unsafe.Sizeof(x))*8;
 W2 = W/2;
 B2 = 1<<W2;
 M2 = B2-1;
@@ -80,7 +80,7 @@ func MulAdd128(x, y, c uint64) (z1, z0 uint64) {
 // and return the product as 2 words.

 const (
-W = uint(unsafe.Sizeof(x)) * 8;
+W = uint(unsafe.Sizeof(x))*8;
 W2 = W/2;
 B2 = 1<<W2;
 M2 = B2-1;
@@ -899,9 +899,9 @@ func hexvalue(ch byte) uint {
 case '0' <= ch && ch <= '9':
 d = uint(ch-'0');
 case 'a' <= ch && ch <= 'f':
-d = uint(ch-'a') + 10;
+d = uint(ch-'a')+10;
 case 'A' <= ch && ch <= 'F':
-d = uint(ch-'A') + 10;
+d = uint(ch-'A')+10;
 }
 return d;
 }
@@ -99,7 +99,7 @@ func nrDivEst(x0, y0 Natural) Natural {
 // Determine a scale factor f = 2^e such that
 // 0.5 <= y/f == y*(2^-e) < 1.0
 // and scale y accordingly.
-e := int(y.m.Log2()) + 1;
+e := int(y.m.Log2())+1;
 y.e -= e;

 // t1
@@ -133,7 +133,7 @@ func nrDivEst(x0, y0 Natural) Natural {
 // reduce mantissa size
 // TODO: Find smaller bound as it will reduce
 // computation time massively.
-d := int(r.m.Log2() + 1) - maxLen;
+d := int(r.m.Log2() + 1)-maxLen;
 if d > 0 {
 r = fpNat{r.m.Shr(uint(d)), r.e + d};
 }
@@ -22,7 +22,7 @@ import (
 // - buffered output

 const (
-defaultBufSize = 4096
+defaultBufSize = 4096;
 )

 // Errors introduced by this package.
@@ -31,20 +31,21 @@ type Error struct {
 }

 var (
 ErrInvalidUnreadByte os.Error = &Error{"bufio: invalid use of UnreadByte"};
 ErrBufferFull os.Error = &Error{"bufio: buffer full"};
 errInternal os.Error = &Error{"bufio: internal error"};
 )

 // BufSizeError is the error representing an invalid buffer size.
 type BufSizeError int

 func (b BufSizeError) String() string {
 return "bufio: bad buffer size " + strconv.Itoa(int(b));
 }

 func copySlice(dst []byte, src []byte) {
 for i := 0; i < len(dst); i++ {
-dst[i] = src[i]
+dst[i] = src[i];
 }
 }

@@ -53,11 +54,11 @@ func copySlice(dst []byte, src []byte) {

 // Reader implements buffering for an io.Reader object.
 type Reader struct {
 buf []byte;
 rd io.Reader;
 r, w int;
 err os.Error;
 lastbyte int;
 }

 // NewReaderSize creates a new Reader whose buffer has the specified size,
@@ -66,18 +67,18 @@ type Reader struct {
 // It returns the Reader and any error.
 func NewReaderSize(rd io.Reader, size int) (*Reader, os.Error) {
 if size <= 0 {
-return nil, BufSizeError(size)
+return nil, BufSizeError(size);
 }
 // Is it already a Reader?
 b, ok := rd.(*Reader);
 if ok && len(b.buf) >= size {
-return b, nil
+return b, nil;
 }
 b = new(Reader);
 b.buf = make([]byte, size);
 b.rd = rd;
 b.lastbyte = -1;
-return b, nil
+return b, nil;
 }

 // NewReader returns a new Reader whose buffer has the default size.
@@ -94,15 +95,15 @@ func NewReader(rd io.Reader) *Reader {
 func (b *Reader) fill() {
 // Slide existing data to beginning.
 if b.w > b.r {
-copySlice(b.buf[0:b.w-b.r], b.buf[b.r:b.w]);
+copySlice(b.buf[0 : b.w - b.r], b.buf[b.r : b.w]);
 b.w -= b.r;
 } else {
-b.w = 0
+b.w = 0;
 }
 b.r = 0;

 // Read new data.
-n, e := b.rd.Read(b.buf[b.w:len(b.buf)]);
+n, e := b.rd.Read(b.buf[b.w : len(b.buf)]);
 b.w += n;
 if e != nil {
 b.err = e;
@@ -120,7 +121,7 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
 n := len(p);
 if b.w == b.r {
 if b.err != nil {
-return nn, b.err
+return nn, b.err;
 }
 if len(p) >= len(b.buf) {
 // Large read, empty buffer.
@@ -137,15 +138,15 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
 continue;
 }
 if n > b.w - b.r {
-n = b.w - b.r
+n = b.w - b.r;
 }
-copySlice(p[0:n], b.buf[b.r:b.r+n]);
+copySlice(p[0:n], b.buf[b.r : b.r + n]);
 p = p[n:len(p)];
 b.r += n;
-b.lastbyte = int(b.buf[b.r-1]);
-nn += n
+b.lastbyte = int(b.buf[b.r - 1]);
+nn += n;
 }
-return nn, nil
+return nn, nil;
 }

 // ReadByte reads and returns a single byte.
@@ -153,14 +154,14 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
 func (b *Reader) ReadByte() (c byte, err os.Error) {
 for b.w == b.r {
 if b.err != nil {
-return 0, b.err
+return 0, b.err;
 }
 b.fill();
 }
 c = b.buf[b.r];
 b.r++;
 b.lastbyte = int(c);
-return c, nil
+return c, nil;
 }

 // UnreadByte unreads the last byte. Only the most recently read byte can be unread.
@@ -173,17 +174,17 @@ func (b *Reader) UnreadByte() os.Error {
 return nil;
 }
 if b.r <= 0 {
-return ErrInvalidUnreadByte
+return ErrInvalidUnreadByte;
 }
 b.r--;
 b.lastbyte = -1;
-return nil
+return nil;
 }

 // ReadRune reads a single UTF-8 encoded Unicode character and returns the
 // rune and its size in bytes.
 func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
-for b.r + utf8.UTFMax > b.w && !utf8.FullRune(b.buf[b.r:b.w]) && b.err == nil {
+for b.r + utf8.UTFMax > b.w && !utf8.FullRune(b.buf[b.r : b.w]) && b.err == nil {
 b.fill();
 }
 if b.r == b.w {
@@ -191,11 +192,11 @@ func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
 }
 rune, size = int(b.buf[b.r]), 1;
 if rune >= 0x80 {
-rune, size = utf8.DecodeRune(b.buf[b.r:b.w]);
+rune, size = utf8.DecodeRune(b.buf[b.r : b.w]);
 }
 b.r += size;
-b.lastbyte = int(b.buf[b.r-1]);
-return rune, size, nil
+b.lastbyte = int(b.buf[b.r - 1]);
+return rune, size, nil;
 }

 // Helper function: look for byte c in array p,
@@ -203,10 +204,10 @@ func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
 func findByte(p []byte, c byte) int {
 for i := 0; i < len(p); i++ {
 if p[i] == c {
-return i
+return i;
 }
 }
-return -1
+return -1;
 }

 // Buffered returns the number of bytes that can be read from the current buffer.
@@ -226,33 +227,33 @@ func (b *Reader) Buffered() int {
 // ReadSlice returns err != nil if and only if line does not end in delim.
 func (b *Reader) ReadSlice(delim byte) (line []byte, err os.Error) {
 // Look in buffer.
-if i := findByte(b.buf[b.r:b.w], delim); i >= 0 {
-line1 := b.buf[b.r:b.r+i+1];
+if i := findByte(b.buf[b.r : b.w], delim); i >= 0 {
+line1 := b.buf[b.r : b.r + i + 1];
 b.r += i+1;
-return line1, nil
+return line1, nil;
 }

 // Read more into buffer, until buffer fills or we find delim.
 for {
 if b.err != nil {
-line := b.buf[b.r:b.w];
+line := b.buf[b.r : b.w];
 b.r = b.w;
-return line, b.err
+return line, b.err;
 }

 n := b.Buffered();
 b.fill();

 // Search new part of buffer
-if i := findByte(b.buf[n:b.w], delim); i >= 0 {
-line := b.buf[0:n+i+1];
+if i := findByte(b.buf[n : b.w], delim); i >= 0 {
+line := b.buf[0 : n+i+1];
 b.r = n+i+1;
-return line, nil
+return line, nil;
 }

 // Buffer is full?
 if b.Buffered() >= len(b.buf) {
-return nil, ErrBufferFull
+return nil, ErrBufferFull;
 }
 }
 panic("not reached");
@@ -275,11 +276,11 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
 var e os.Error;
 frag, e = b.ReadSlice(delim);
 if e == nil { // got final fragment
-break
+break;
 }
 if e != ErrBufferFull { // unexpected error
 err = e;
-break
+break;
 }

 // Read bytes out of buffer.
@@ -289,12 +290,12 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
 if e != nil {
 frag = buf[0:n];
 err = e;
-break
+break;
 }
 if n != len(buf) {
 frag = buf[0:n];
 err = errInternal;
-break
+break;
 }

 // Grow list if needed.
@@ -305,7 +306,7 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
 for i := 0; i < len(full); i++ {
 newfull[i] = full[i];
 }
-full = newfull
+full = newfull;
 }

 // Save buffer
@@ -316,7 +317,7 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
 // Allocate new buffer to hold the full pieces and the fragment.
 n := 0;
 for i := 0; i < nfull; i++ {
-n += len(full[i])
+n += len(full[i]);
 }
 n += len(frag);

@@ -324,11 +325,11 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
 buf := make([]byte, n);
 n = 0;
 for i := 0; i < nfull; i++ {
-copySlice(buf[n:n+len(full[i])], full[i]);
-n += len(full[i])
+copySlice(buf[n : n+len(full[i])], full[i]);
+n += len(full[i]);
 }
-copySlice(buf[n:n+len(frag)], frag);
-return buf, err
+copySlice(buf[n : n+len(frag)], frag);
+return buf, err;
 }

 // ReadString reads until the first occurrence of delim in the input,
@@ -346,10 +347,10 @@ func (b *Reader) ReadString(delim byte) (line string, err os.Error) {

 // Writer implements buffering for an io.Writer object.
 type Writer struct {
 err os.Error;
 buf []byte;
 n int;
 wr io.Writer;
 }

 // NewWriterSize creates a new Writer whose buffer has the specified size,
@@ -358,17 +359,17 @@ type Writer struct {
 // It returns the Writer and any error.
 func NewWriterSize(wr io.Writer, size int) (*Writer, os.Error) {
 if size <= 0 {
-return nil, BufSizeError(size)
+return nil, BufSizeError(size);
 }
 // Is it already a Writer?
 b, ok := wr.(*Writer);
 if ok && len(b.buf) >= size {
-return b, nil
+return b, nil;
 }
 b = new(Writer);
 b.buf = make([]byte, size);
 b.wr = wr;
-return b, nil
+return b, nil;
 }

 // NewWriter returns a new Writer whose buffer has the default size.
@@ -384,32 +385,32 @@ func NewWriter(wr io.Writer) *Writer {
 // Flush writes any buffered data to the underlying io.Writer.
 func (b *Writer) Flush() os.Error {
 if b.err != nil {
-return b.err
+return b.err;
 }
-n, e := b.wr.Write(b.buf[0:b.n]);
+n, e := b.wr.Write(b.buf[0 : b.n]);
 if n < b.n && e == nil {
 e = io.ErrShortWrite;
 }
 if e != nil {
 if n > 0 && n < b.n {
-copySlice(b.buf[0:b.n-n], b.buf[n:b.n])
+copySlice(b.buf[0 : b.n - n], b.buf[n : b.n]);
 }
 b.n -= n;
 b.err = e;
-return e
+return e;
 }
 b.n = 0;
-return nil
+return nil;
 }

 // Available returns how many bytes are unused in the buffer.
 func (b *Writer) Available() int {
-return len(b.buf) - b.n
+return len(b.buf) - b.n;
 }

 // Buffered returns the number of bytes that have been written into the current buffer.
 func (b *Writer) Buffered() int {
-return b.n
+return b.n;
 }

 // Write writes the contents of p into the buffer.
@@ -418,16 +419,16 @@ func (b *Writer) Buffered() int {
 // why the write is short.
 func (b *Writer) Write(p []byte) (nn int, err os.Error) {
 if b.err != nil {
-return 0, b.err
+return 0, b.err;
 }
 nn = 0;
 for len(p) > 0 {
 n := b.Available();
 if n <= 0 {
 if b.Flush(); b.err != nil {
-break
+break;
 }
-n = b.Available()
+n = b.Available();
 }
 if b.Available() == 0 && len(p) >= len(b.buf) {
 // Large write, empty buffer.
@@ -441,33 +442,33 @@ func (b *Writer) Write(p []byte) (nn int, err os.Error) {
 continue;
 }
 if n > len(p) {
-n = len(p)
+n = len(p);
 }
-copySlice(b.buf[b.n:b.n+n], p[0:n]);
+copySlice(b.buf[b.n : b.n + n], p[0:n]);
 b.n += n;
 nn += n;
-p = p[n:len(p)]
+p = p[n:len(p)];
 }
-return nn, b.err
+return nn, b.err;
 }

 // WriteByte writes a single byte.
 func (b *Writer) WriteByte(c byte) os.Error {
 if b.err != nil {
-return b.err
+return b.err;
 }
 if b.Available() <= 0 && b.Flush() != nil {
-return b.err
+return b.err;
 }
 b.buf[b.n] = c;
 b.n++;
-return nil
+return nil;
 }

 // WriteString writes a string.
 func (b *Writer) WriteString(s string) os.Error {
 if b.err != nil {
-return b.err
+return b.err;
 }
 // Common case, worth making fast.
 if b.Available() >= len(s) || len(b.buf) >= len(s) && b.Flush() == nil {
@@ -480,7 +481,7 @@ func (b *Writer) WriteString(s string) os.Error {
 for i := 0; i < len(s); i++ { // loop over bytes, not runes.
 b.WriteByte(s[i]);
 }
-return b.err
+return b.err;
 }

 // buffered input and output
@@ -494,6 +495,5 @@ type ReadWriter struct {

 // NewReadWriter allocates a new ReadWriter that dispatches to r and w.
 func NewReadWriter(r *Reader, w *Writer) *ReadWriter {
-return &ReadWriter{r, w}
+return &ReadWriter{r, w};
 }
@@ -38,19 +38,19 @@ type Buffer struct {
 // Bytes returns the contents of the unread portion of the buffer;
 // len(b.Bytes()) == b.Len().
 func (b *Buffer) Bytes() []byte {
-return b.buf[b.off : len(b.buf)]
+return b.buf[b.off : len(b.buf)];
 }

 // String returns the contents of the unread portion of the buffer
 // as a string.
 func (b *Buffer) String() string {
-return string(b.buf[b.off : len(b.buf)])
+return string(b.buf[b.off : len(b.buf)]);
 }

 // Len returns the number of bytes of the unread portion of the buffer;
 // b.Len() == len(b.Bytes()).
 func (b *Buffer) Len() int {
-return len(b.buf) - b.off
+return len(b.buf) - b.off;
 }

 // Truncate discards all but the first n unread bytes from the buffer.
@@ -75,21 +75,21 @@ func (b *Buffer) Write(p []byte) (n int, err os.Error) {
 m := b.Len();
 n = len(p);

-if len(b.buf) + n > cap(b.buf) {
+if len(b.buf)+n > cap(b.buf) {
 // not enough space at end
 buf := b.buf;
-if m + n > cap(b.buf) {
+if m+n > cap(b.buf) {
 // not enough space anywhere
-buf = make([]byte, 2*cap(b.buf) + n)
+buf = make([]byte, 2*cap(b.buf) + n);
 }
-copyBytes(buf, 0, b.buf[b.off:b.off+m]);
+copyBytes(buf, 0, b.buf[b.off : b.off + m]);
 b.buf = buf;
-b.off = 0
+b.off = 0;
 }

 b.buf = b.buf[0 : b.off + m + n];
 copyBytes(b.buf, b.off + m, p);
-return n, nil
+return n, nil;
 }

 // WriteString appends the contents of s to the buffer. The return
@@ -98,21 +98,21 @@ func (b *Buffer) WriteString(s string) (n int, err os.Error) {
 m := b.Len();
 n = len(s);

-if len(b.buf) + n > cap(b.buf) {
+if len(b.buf)+n > cap(b.buf) {
 // not enough space at end
 buf := b.buf;
-if m + n > cap(b.buf) {
+if m+n > cap(b.buf) {
 // not enough space anywhere
-buf = make([]byte, 2*cap(b.buf) + n)
+buf = make([]byte, 2*cap(b.buf) + n);
 }
-copyBytes(buf, 0, b.buf[b.off:b.off+m]);
+copyBytes(buf, 0, b.buf[b.off : b.off + m]);
 b.buf = buf;
-b.off = 0
+b.off = 0;
 }

 b.buf = b.buf[0 : b.off + m + n];
-copyString(b.buf, b.off+m, s);
-return n, nil
+copyString(b.buf, b.off + m, s);
+return n, nil;
 }

 // WriteByte appends the byte c to the buffer.
@@ -134,19 +134,19 @@ func (b *Buffer) WriteByte(c byte) os.Error {
 // otherwise it is nil.
 func (b *Buffer) Read(p []byte) (n int, err os.Error) {
 if b.off >= len(b.buf) {
-return 0, os.EOF
+return 0, os.EOF;
 }
 m := b.Len();
 n = len(p);

 if n > m {
 // more bytes requested than available
-n = m
+n = m;
 }

-copyBytes(p, 0, b.buf[b.off:b.off+n]);
+copyBytes(p, 0, b.buf[b.off : b.off + n]);
 b.off += n;
-return n, err
+return n, err;
 }

 // ReadByte reads and returns the next byte from the buffer.
@@ -19,7 +19,7 @@ var bytes []byte // test data; same as data but as a slice.
 func init() {
 bytes = make([]byte, N);
 for i := 0; i < N; i++ {
-bytes[i] = 'a' + byte(i%26);
+bytes[i] = 'a'+byte(i%26);
 }
 data = string(bytes);
 }
@@ -5,10 +5,10 @@
 package bytes_test

 import (
 . "bytes";
 "strings";
 "testing";
 "unicode";
 )

 func eq(a, b []string) bool {
@@ -26,9 +26,9 @@ func eq(a, b []string) bool {
 func arrayOfString(a [][]byte) []string {
 result := make([]string, len(a));
 for j := 0; j < len(a); j++ {
-result[j] = string(a[j])
+result[j] = string(a[j]);
 }
-return result
+return result;
 }

 // For ease of reading, the test cases use strings that are converted to byte
@@ -40,21 +40,22 @@ var commas = "1,2,3,4"
 var dots = "1....2....3....4"

 type CompareTest struct {
 a string;
 b string;
 cmp int;
 }
-var comparetests = []CompareTest {
-CompareTest{ "", "", 0 },
-CompareTest{ "a", "", 1 },
-CompareTest{ "", "a", -1 },
-CompareTest{ "abc", "abc", 0 },
-CompareTest{ "ab", "abc", -1 },
-CompareTest{ "abc", "ab", 1 },
-CompareTest{ "x", "ab", 1 },
-CompareTest{ "ab", "x", -1 },
-CompareTest{ "x", "a", 1 },
-CompareTest{ "b", "x", -1 },
+
+var comparetests = []CompareTest{
+CompareTest{"", "", 0},
+CompareTest{"a", "", 1},
+CompareTest{"", "a", -1},
+CompareTest{"abc", "abc", 0},
+CompareTest{"ab", "abc", -1},
+CompareTest{"abc", "ab", 1},
+CompareTest{"x", "ab", 1},
+CompareTest{"ab", "x", -1},
+CompareTest{"x", "a", 1},
+CompareTest{"b", "x", -1},
 }

 func TestCompare(t *testing.T) {
@@ -67,7 +68,7 @@ func TestCompare(t *testing.T) {
 if cmp != tt.cmp {
 t.Errorf(`Compare(%q, %q) = %v`, tt.a, tt.b, cmp);
 }
-if eql != (tt.cmp==0) {
+if eql != (tt.cmp == 0) {
 t.Errorf(`Equal(%q, %q) = %v`, tt.a, tt.b, eql);
 }
 }
@@ -75,17 +76,19 @@ func TestCompare(t *testing.T) {

 type ExplodeTest struct {
 s string;
 n int;
 a []string;
 }
-var explodetests = []ExplodeTest {
-ExplodeTest{ abcd, 0, []string{"a", "b", "c", "d"} },
-ExplodeTest{ faces, 0, []string{"☺", "☻", "☹"} },
-ExplodeTest{ abcd, 2, []string{"a", "bcd"} },
+
+var explodetests = []ExplodeTest{
+ExplodeTest{abcd, 0, []string{"a", "b", "c", "d"}},
+ExplodeTest{faces, 0, []string{"☺", "☻", "☹"}},
+ExplodeTest{abcd, 2, []string{"a", "bcd"}},
 }

 func TestExplode(t *testing.T) {
-for _, tt := range(explodetests) {
+for _, tt := range (explodetests) {
 a := Split(strings.Bytes(tt.s), nil, tt.n);
 result := arrayOfString(a);
 if !eq(result, tt.a) {
@@ -101,25 +104,26 @@ func TestExplode(t *testing.T) {

 type SplitTest struct {
 s string;
 sep string;
 n int;
 a []string;
 }
-var splittests = []SplitTest {
-SplitTest{ abcd, "a", 0, []string{"", "bcd"} },
-SplitTest{ abcd, "z", 0, []string{"abcd"} },
-SplitTest{ abcd, "", 0, []string{"a", "b", "c", "d"} },
-SplitTest{ commas, ",", 0, []string{"1", "2", "3", "4"} },
-SplitTest{ dots, "...", 0, []string{"1", ".2", ".3", ".4"} },
-SplitTest{ faces, "☹", 0, []string{"☺☻", ""} },
-SplitTest{ faces, "~", 0, []string{faces} },
-SplitTest{ faces, "", 0, []string{"☺", "☻", "☹"} },
-SplitTest{ "1 2 3 4", " ", 3, []string{"1", "2", "3 4"} },
-SplitTest{ "1 2 3", " ", 3, []string{"1", "2", "3"} },
-SplitTest{ "1 2", " ", 3, []string{"1", "2"} },
-SplitTest{ "123", "", 2, []string{"1", "23"} },
-SplitTest{ "123", "", 17, []string{"1", "2", "3"} },
+
+var splittests = []SplitTest{
+SplitTest{abcd, "a", 0, []string{"", "bcd"}},
+SplitTest{abcd, "z", 0, []string{"abcd"}},
+SplitTest{abcd, "", 0, []string{"a", "b", "c", "d"}},
+SplitTest{commas, ",", 0, []string{"1", "2", "3", "4"}},
+SplitTest{dots, "...", 0, []string{"1", ".2", ".3", ".4"}},
+SplitTest{faces, "☹", 0, []string{"☺☻", ""}},
+SplitTest{faces, "~", 0, []string{faces}},
+SplitTest{faces, "", 0, []string{"☺", "☻", "☹"}},
+SplitTest{"1 2 3 4", " ", 3, []string{"1", "2", "3 4"}},
+SplitTest{"1 2 3", " ", 3, []string{"1", "2", "3"}},
+SplitTest{"1 2", " ", 3, []string{"1", "2"}},
+SplitTest{"123", "", 2, []string{"1", "23"}},
+SplitTest{"123", "", 17, []string{"1", "2", "3"}},
 }

 func TestSplit(t *testing.T) {
@ -143,14 +147,15 @@ type CopyTest struct {
|
||||||
n int;
|
n int;
|
||||||
res string;
|
res string;
|
||||||
}
|
}
|
||||||
var copytests = []CopyTest {
|
|
||||||
CopyTest{ "", "", 0, "" },
|
var copytests = []CopyTest{
|
||||||
CopyTest{ "a", "", 0, "a" },
|
CopyTest{"", "", 0, ""},
|
||||||
CopyTest{ "a", "a", 1, "a" },
|
CopyTest{"a", "", 0, "a"},
|
||||||
CopyTest{ "a", "b", 1, "b" },
|
CopyTest{"a", "a", 1, "a"},
|
||||||
CopyTest{ "xyz", "abc", 3, "abc" },
|
CopyTest{"a", "b", 1, "b"},
|
||||||
CopyTest{ "wxyz", "abc", 3, "abcz" },
|
CopyTest{"xyz", "abc", 3, "abc"},
|
||||||
CopyTest{ "xyz", "abcd", 3, "abc" },
|
CopyTest{"wxyz", "abc", 3, "abcz"},
|
||||||
|
CopyTest{"xyz", "abcd", 3, "abc"},
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCopy(t *testing.T) {
|
func TestCopy(t *testing.T) {
|
||||||
|
|
@ -172,7 +177,7 @@ type StringTest struct {
|
||||||
in, out string;
|
in, out string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var upperTests = []StringTest {
|
var upperTests = []StringTest{
|
||||||
StringTest{"", ""},
|
StringTest{"", ""},
|
||||||
StringTest{"abc", "ABC"},
|
StringTest{"abc", "ABC"},
|
||||||
StringTest{"AbC123", "ABC123"},
|
StringTest{"AbC123", "ABC123"},
|
||||||
|
|
@ -180,7 +185,7 @@ var upperTests = []StringTest {
|
||||||
StringTest{"\u0250\u0250\u0250\u0250\u0250", "\u2C6F\u2C6F\u2C6F\u2C6F\u2C6F"}, // grows one byte per char
|
StringTest{"\u0250\u0250\u0250\u0250\u0250", "\u2C6F\u2C6F\u2C6F\u2C6F\u2C6F"}, // grows one byte per char
|
||||||
}
|
}
|
||||||
|
|
||||||
var lowerTests = []StringTest {
|
var lowerTests = []StringTest{
|
||||||
StringTest{"", ""},
|
StringTest{"", ""},
|
||||||
StringTest{"abc", "abc"},
|
StringTest{"abc", "abc"},
|
||||||
StringTest{"AbC123", "abc123"},
|
StringTest{"AbC123", "abc123"},
|
||||||
|
|
@ -190,10 +195,10 @@ var lowerTests = []StringTest {
|
||||||
|
|
||||||
const space = "\t\v\r\f\n\u0085\u00a0\u2000\u3000"
|
const space = "\t\v\r\f\n\u0085\u00a0\u2000\u3000"
|
||||||
|
|
||||||
var trimSpaceTests = []StringTest {
|
var trimSpaceTests = []StringTest{
|
||||||
StringTest{"", ""},
|
StringTest{"", ""},
|
||||||
StringTest{"abc", "abc"},
|
StringTest{"abc", "abc"},
|
||||||
StringTest{space + "abc" + space, "abc"},
|
StringTest{space+"abc"+space, "abc"},
|
||||||
StringTest{" ", ""},
|
StringTest{" ", ""},
|
||||||
StringTest{" \t\r\n \t\t\r\r\n\n ", ""},
|
StringTest{" \t\r\n \t\t\r\r\n\n ", ""},
|
||||||
StringTest{" \t\r\n x\t\t\r\r\n\n ", "x"},
|
StringTest{" \t\r\n x\t\t\r\r\n\n ", "x"},
|
||||||
|
|
@ -227,23 +232,27 @@ func runStringTests(t *testing.T, f func([]byte) []byte, funcName string, testCa
|
||||||
func tenRunes(rune int) string {
|
func tenRunes(rune int) string {
|
||||||
r := make([]int, 10);
|
r := make([]int, 10);
|
||||||
for i := range r {
|
for i := range r {
|
||||||
r[i] = rune
|
r[i] = rune;
|
||||||
}
|
}
|
||||||
return string(r)
|
return string(r);
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMap(t *testing.T) {
|
func TestMap(t *testing.T) {
|
||||||
// Run a couple of awful growth/shrinkage tests
|
// Run a couple of awful growth/shrinkage tests
|
||||||
a := tenRunes('a');
|
a := tenRunes('a');
|
||||||
// 1. Grow. This triggers two reallocations in Map.
|
// 1. Grow. This triggers two reallocations in Map.
|
||||||
maxRune := func(rune int) int { return unicode.MaxRune };
|
maxRune := func(rune int) int {
|
||||||
|
return unicode.MaxRune;
|
||||||
|
};
|
||||||
m := Map(maxRune, Bytes(a));
|
m := Map(maxRune, Bytes(a));
|
||||||
expect := tenRunes(unicode.MaxRune);
|
expect := tenRunes(unicode.MaxRune);
|
||||||
if string(m) != expect {
|
if string(m) != expect {
|
||||||
t.Errorf("growing: expected %q got %q", expect, m);
|
t.Errorf("growing: expected %q got %q", expect, m);
|
||||||
}
|
}
|
||||||
// 2. Shrink
|
// 2. Shrink
|
||||||
minRune := func(rune int) int { return 'a' };
|
minRune := func(rune int) int {
|
||||||
|
return 'a';
|
||||||
|
};
|
||||||
m = Map(minRune, Bytes(tenRunes(unicode.MaxRune)));
|
m = Map(minRune, Bytes(tenRunes(unicode.MaxRune)));
|
||||||
expect = a;
|
expect = a;
|
||||||
if string(m) != expect {
|
if string(m) != expect {
|
||||||
|
|
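
Why the growth step above has to reallocate: every 1-byte 'a' maps to a rune with a longer UTF-8 encoding. A minimal check, in present-day Go and outside this tree:

package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	// 'a' encodes in 1 byte; U+10FFFF (unicode.MaxRune) encodes in 4 bytes,
	// so mapping ten 'a's to MaxRune forces the result buffer to grow.
	fmt.Println(utf8.RuneLen('a'), utf8.RuneLen(0x10FFFF)) // 1 4
}
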
@ -264,24 +273,25 @@ func TestTrimSpace(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type AddTest struct {
|
type AddTest struct {
|
||||||
s, t string;
|
s, t string;
|
||||||
cap int;
|
cap int;
|
||||||
}
|
}
|
||||||
var addtests = []AddTest {
|
|
||||||
AddTest{ "", "", 0 },
|
var addtests = []AddTest{
|
||||||
AddTest{ "a", "", 1 },
|
AddTest{"", "", 0},
|
||||||
AddTest{ "a", "b", 1 },
|
AddTest{"a", "", 1},
|
||||||
AddTest{ "abc", "def", 100 },
|
AddTest{"a", "b", 1},
|
||||||
|
AddTest{"abc", "def", 100},
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAdd(t *testing.T) {
|
func TestAdd(t *testing.T) {
|
||||||
for _, test := range addtests {
|
for _, test := range addtests {
|
||||||
b := make([]byte, len(test.s), test.cap);
|
b := make([]byte, len(test.s), test.cap);
|
||||||
for i := 0; i < len(test.s); i++ {
|
for i := 0; i < len(test.s); i++ {
|
||||||
b[i] = test.s[i]
|
b[i] = test.s[i];
|
||||||
}
|
}
|
||||||
b = Add(b, strings.Bytes(test.t));
|
b = Add(b, strings.Bytes(test.t));
|
||||||
if string(b) != test.s+test.t {
|
if string(b) != test.s + test.t {
|
||||||
t.Errorf("Add(%q,%q) = %q", test.s, test.t, string(b));
|
t.Errorf("Add(%q,%q) = %q", test.s, test.t, string(b));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -291,7 +301,7 @@ func TestAddByte(t *testing.T) {
|
||||||
const N = 2e5;
|
const N = 2e5;
|
||||||
b := make([]byte, 0);
|
b := make([]byte, 0);
|
||||||
for i := 0; i < N; i++ {
|
for i := 0; i < N; i++ {
|
||||||
b = AddByte(b, byte(i))
|
b = AddByte(b, byte(i));
|
||||||
}
|
}
|
||||||
if len(b) != N {
|
if len(b) != N {
|
||||||
t.Errorf("AddByte: too small; expected %d got %d", N, len(b));
|
t.Errorf("AddByte: too small; expected %d got %d", N, len(b));
|
||||||
|
|
|
||||||
|
|
@ -12,67 +12,66 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
NoCompression = 0;
|
NoCompression = 0;
|
||||||
BestSpeed = 1;
|
BestSpeed = 1;
|
||||||
fastCompression = 3;
|
fastCompression = 3;
|
||||||
BestCompression = 9;
|
BestCompression = 9;
|
||||||
DefaultCompression = -1;
|
DefaultCompression = -1;
|
||||||
|
logMaxOffsetSize = 15; // Standard DEFLATE
|
||||||
logMaxOffsetSize = 15; // Standard DEFLATE
|
wideLogMaxOffsetSize = 22; // Wide DEFLATE
|
||||||
wideLogMaxOffsetSize = 22; // Wide DEFLATE
|
minMatchLength = 3; // The smallest match that the deflater looks for
|
||||||
minMatchLength = 3; // The smallest match that the deflater looks for
|
maxMatchLength = 258; // The longest match for the deflater
|
||||||
maxMatchLength = 258; // The longest match for the deflater
|
minOffsetSize = 1; // The shortest offset that makes any sense
|
||||||
minOffsetSize = 1; // The shortest offset that makes any sense
|
|
||||||
|
|
||||||
// The maximum number of tokens we put into a single flat block, just too
|
// The maximum number of tokens we put into a single flat block, just too
|
||||||
// stop things from getting too large.
|
// stop things from getting too large.
|
||||||
maxFlateBlockTokens = 1 << 14;
|
maxFlateBlockTokens = 1<<14;
|
||||||
maxStoreBlockSize = 65535;
|
maxStoreBlockSize = 65535;
|
||||||
hashBits = 15;
|
hashBits = 15;
|
||||||
hashSize = 1 << hashBits;
|
hashSize = 1<<hashBits;
|
||||||
hashMask = (1 << hashBits) - 1;
|
hashMask = (1<<hashBits)-1;
|
||||||
hashShift = (hashBits + minMatchLength - 1) / minMatchLength;
|
hashShift = (hashBits + minMatchLength - 1) / minMatchLength;
|
||||||
)
|
)
|
||||||
|
|
||||||
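
The hash constants above define a small rolling hash over minMatchLength bytes. A minimal sketch, in present-day Go with the illustrative name hashAt, of the shift-and-mask update that doDeflate performs further down:

package main

import "fmt"

const (
	minMatchLength = 3
	hashBits       = 15
	hashMask       = (1 << hashBits) - 1
	hashShift      = (hashBits + minMatchLength - 1) / minMatchLength // 5
)

// hashAt mimics the deflater's update: shift one byte in at a time and mask
// to hashBits, so the value depends on the last minMatchLength bytes seen.
func hashAt(window []byte, i int) int {
	h := int(window[i])<<hashShift + int(window[i+1])
	return (h<<hashShift + int(window[i+2])) & hashMask
}

func main() {
	w := []byte("abcxabc")
	fmt.Println(hashAt(w, 0) == hashAt(w, 4)) // equal 3-byte windows hash alike: true
}
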
type syncPipeReader struct {
|
type syncPipeReader struct {
|
||||||
*io.PipeReader;
|
*io.PipeReader;
|
||||||
closeChan chan bool;
|
closeChan chan bool;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sr *syncPipeReader) CloseWithError(err os.Error) os.Error {
|
func (sr *syncPipeReader) CloseWithError(err os.Error) os.Error {
|
||||||
retErr := sr.PipeReader.CloseWithError(err);
|
retErr := sr.PipeReader.CloseWithError(err);
|
||||||
sr.closeChan <- true; // finish writer close
|
sr.closeChan <- true; // finish writer close
|
||||||
return retErr;
|
return retErr;
|
||||||
}
|
}
|
||||||
|
|
||||||
type syncPipeWriter struct {
|
type syncPipeWriter struct {
|
||||||
*io.PipeWriter;
|
*io.PipeWriter;
|
||||||
closeChan chan bool;
|
closeChan chan bool;
|
||||||
}
|
}
|
||||||
|
|
||||||
type compressionLevel struct {
|
type compressionLevel struct {
|
||||||
good, lazy, nice, chain, fastSkipHashing int;
|
good, lazy, nice, chain, fastSkipHashing int;
|
||||||
}
|
}
|
||||||
|
|
||||||
var levels = [] compressionLevel {
|
var levels = []compressionLevel{
|
||||||
compressionLevel {}, // 0
|
compressionLevel{}, // 0
|
||||||
// For levels 1-3 we don't bother trying with lazy matches
|
// For levels 1-3 we don't bother trying with lazy matches
|
||||||
compressionLevel { 3, 0, 8, 4, 4, },
|
compressionLevel{3, 0, 8, 4, 4},
|
||||||
compressionLevel { 3, 0, 16, 8, 5, },
|
compressionLevel{3, 0, 16, 8, 5},
|
||||||
compressionLevel { 3, 0, 32, 32, 6 },
|
compressionLevel{3, 0, 32, 32, 6},
|
||||||
// Levels 4-9 use increasingly more lazy matching
|
// Levels 4-9 use increasingly more lazy matching
|
||||||
// and increasingly stringent conditions for "good enough".
|
// and increasingly stringent conditions for "good enough".
|
||||||
compressionLevel { 4, 4, 16, 16, math.MaxInt32 },
|
compressionLevel{4, 4, 16, 16, math.MaxInt32},
|
||||||
compressionLevel { 8, 16, 32, 32, math.MaxInt32 },
|
compressionLevel{8, 16, 32, 32, math.MaxInt32},
|
||||||
compressionLevel { 8, 16, 128, 128, math.MaxInt32 },
|
compressionLevel{8, 16, 128, 128, math.MaxInt32},
|
||||||
compressionLevel { 8, 32, 128, 256, math.MaxInt32 },
|
compressionLevel{8, 32, 128, 256, math.MaxInt32},
|
||||||
compressionLevel { 32, 128, 258, 1024, math.MaxInt32 },
|
compressionLevel{32, 128, 258, 1024, math.MaxInt32},
|
||||||
compressionLevel { 32, 258, 258, 4096, math.MaxInt32 },
|
compressionLevel{32, 258, 258, 4096, math.MaxInt32},
|
||||||
}
|
}
|
||||||
|
|
||||||
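
The levels table above supplies the matcher's tuning knobs. A rough illustration, with simplified names rather than the deflater's own code, of how good and chain interact under the "good enough, so only look in 1/4 the chain" rule noted in findMatch below:

package main

import "fmt"

// lvl stands in for one levels[] entry; triesFor shows how an already
// good-enough previous match cuts the hash-chain search budget to a quarter.
type lvl struct{ good, chain int }

func triesFor(l lvl, prevLength int) int {
	tries := l.chain
	if prevLength >= l.good {
		tries >>= 2
	}
	return tries
}

func main() {
	l := lvl{good: 8, chain: 128} // roughly the level-6 entry above
	fmt.Println(triesFor(l, 3), triesFor(l, 20)) // 128 32
}
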
func (sw *syncPipeWriter) Close() os.Error {
|
func (sw *syncPipeWriter) Close() os.Error {
|
||||||
err := sw.PipeWriter.Close();
|
err := sw.PipeWriter.Close();
|
||||||
<-sw.closeChan; // wait for reader close
|
<-sw.closeChan; // wait for reader close
|
||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -84,40 +83,40 @@ func syncPipe() (*syncPipeReader, *syncPipeWriter) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type deflater struct {
|
type deflater struct {
|
||||||
level int;
|
level int;
|
||||||
logWindowSize uint;
|
logWindowSize uint;
|
||||||
w *huffmanBitWriter;
|
w *huffmanBitWriter;
|
||||||
r io.Reader;
|
r io.Reader;
|
||||||
// (1 << logWindowSize) - 1.
|
// (1 << logWindowSize) - 1.
|
||||||
windowMask int;
|
windowMask int;
|
||||||
|
|
||||||
// hashHead[hashValue] contains the largest inputIndex with the specified hash value
|
// hashHead[hashValue] contains the largest inputIndex with the specified hash value
|
||||||
hashHead []int;
|
hashHead []int;
|
||||||
|
|
||||||
// If hashHead[hashValue] is within the current window, then
|
// If hashHead[hashValue] is within the current window, then
|
||||||
// hashPrev[hashHead[hashValue] & windowMask] contains the previous index
|
// hashPrev[hashHead[hashValue] & windowMask] contains the previous index
|
||||||
// with the same hash value.
|
// with the same hash value.
|
||||||
hashPrev []int;
|
hashPrev []int;
|
||||||
|
|
||||||
// If we find a match of length >= niceMatch, then we don't bother searching
|
// If we find a match of length >= niceMatch, then we don't bother searching
|
||||||
// any further.
|
// any further.
|
||||||
niceMatch int;
|
niceMatch int;
|
||||||
|
|
||||||
// If we find a match of length >= goodMatch, we only do a half-hearted
|
// If we find a match of length >= goodMatch, we only do a half-hearted
|
||||||
// effort at doing lazy matching starting at the next character
|
// effort at doing lazy matching starting at the next character
|
||||||
goodMatch int;
|
goodMatch int;
|
||||||
|
|
||||||
// The maximum number of chains we look at when finding a match
|
// The maximum number of chains we look at when finding a match
|
||||||
maxChainLength int;
|
maxChainLength int;
|
||||||
|
|
||||||
// The sliding window we use for matching
|
// The sliding window we use for matching
|
||||||
window []byte;
|
window []byte;
|
||||||
|
|
||||||
// The index just past the last valid character
|
// The index just past the last valid character
|
||||||
windowEnd int;
|
windowEnd int;
|
||||||
|
|
||||||
// index in "window" at which current block starts
|
// index in "window" at which current block starts
|
||||||
blockStart int;
|
blockStart int;
|
||||||
}
|
}
|
||||||
|
|
||||||
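
The hashHead and hashPrev fields above form a classic chained hash. A standalone sketch, in present-day Go with illustrative names, of how findMatch walks such a chain from the newest position backwards until it leaves the window or runs out of tries:

package main

import "fmt"

// chain follows hashPrev links the way findMatch does: start at the newest
// position in this hash bucket and stop at the -1 sentinel, below minIndex,
// or when the try budget is exhausted.
func chain(hashHead, hashPrev []int, windowMask, h, minIndex, maxTries int) []int {
	var out []int
	for i, tries := hashHead[h], maxTries; i >= minIndex && tries > 0; tries-- {
		out = append(out, i)
		i = hashPrev[i&windowMask]
	}
	return out
}

func main() {
	// Made-up data: positions 2, 9 and 17 all hashed into bucket 5.
	hashHead := make([]int, 8)
	hashPrev := make([]int, 16)
	hashHead[5] = 17
	hashPrev[17&15] = 9
	hashPrev[9&15] = 2
	hashPrev[2&15] = -1 // end of chain, as in the deflater
	fmt.Println(chain(hashHead, hashPrev, 15, 5, 0, 32)) // [17 9 2]
}
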
func (d *deflater) flush() os.Error {
|
func (d *deflater) flush() os.Error {
|
||||||
|
|
@ -127,9 +126,9 @@ func (d *deflater) flush() os.Error {
|
||||||
|
|
||||||
func (d *deflater) fillWindow(index int) (int, os.Error) {
|
func (d *deflater) fillWindow(index int) (int, os.Error) {
|
||||||
wSize := d.windowMask + 1;
|
wSize := d.windowMask + 1;
|
||||||
if index >= wSize + wSize - (minMatchLength + maxMatchLength) {
|
if index >= wSize+wSize-(minMatchLength + maxMatchLength) {
|
||||||
// shift the window by wSize
|
// shift the window by wSize
|
||||||
bytes.Copy(d.window, d.window[wSize:2*wSize]);
|
bytes.Copy(d.window, d.window[wSize : 2*wSize]);
|
||||||
index -= wSize;
|
index -= wSize;
|
||||||
d.windowEnd -= wSize;
|
d.windowEnd -= wSize;
|
||||||
if d.blockStart >= wSize {
|
if d.blockStart >= wSize {
|
||||||
|
|
@ -138,10 +137,10 @@ func (d *deflater) fillWindow(index int) (int, os.Error) {
|
||||||
d.blockStart = math.MaxInt32;
|
d.blockStart = math.MaxInt32;
|
||||||
}
|
}
|
||||||
for i, h := range d.hashHead {
|
for i, h := range d.hashHead {
|
||||||
d.hashHead[i] = max(h - wSize, -1);
|
d.hashHead[i] = max(h-wSize, -1);
|
||||||
}
|
}
|
||||||
for i, h := range d.hashPrev {
|
for i, h := range d.hashPrev {
|
||||||
d.hashPrev[i] = max(h - wSize, -1);
|
d.hashPrev[i] = max(h-wSize, -1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
var count int;
|
var count int;
|
||||||
|
|
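
A tiny illustration, not the deflater's code, of the rebasing above: when fillWindow slides the buffer down by wSize, remembered hash positions shift with it, and anything that falls below zero collapses to the -1 "not in window" sentinel, which is what max(h-wSize, -1) achieves.

package main

import "fmt"

// rebase shifts a remembered position after the window slides by wSize;
// positions that slide out of range collapse to the -1 sentinel.
func rebase(h, wSize int) int {
	if h-wSize > -1 {
		return h - wSize
	}
	return -1
}

func main() {
	const wSize = 1 << 15
	fmt.Println(rebase(40000, wSize), rebase(100, wSize)) // 7232 -1
}
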
@ -158,7 +157,7 @@ func (d *deflater) writeBlock(tokens []token, index int, eof bool) os.Error {
|
||||||
if index > 0 || eof {
|
if index > 0 || eof {
|
||||||
var window []byte;
|
var window []byte;
|
||||||
if d.blockStart <= index {
|
if d.blockStart <= index {
|
||||||
window = d.window[d.blockStart:index];
|
window = d.window[d.blockStart : index];
|
||||||
}
|
}
|
||||||
d.blockStart = index;
|
d.blockStart = index;
|
||||||
d.w.writeBlock(tokens, eof, window);
|
d.w.writeBlock(tokens, eof, window);
|
||||||
|
|
@ -170,10 +169,10 @@ func (d *deflater) writeBlock(tokens []token, index int, eof bool) os.Error {
|
||||||
// Try to find a match starting at index whose length is greater than prevSize.
|
// Try to find a match starting at index whose length is greater than prevSize.
|
||||||
// We only look at chainCount possibilities before giving up.
|
// We only look at chainCount possibilities before giving up.
|
||||||
func (d *deflater) findMatch(pos int, prevHead int, prevLength int, lookahead int) (length, offset int, ok bool) {
|
func (d *deflater) findMatch(pos int, prevHead int, prevLength int, lookahead int) (length, offset int, ok bool) {
|
||||||
win := d.window[0:pos+min(maxMatchLength, lookahead)];
|
win := d.window[0 : pos + min(maxMatchLength, lookahead)];
|
||||||
|
|
||||||
// We quit when we get a match that's at least nice long
|
// We quit when we get a match that's at least nice long
|
||||||
nice := min(d.niceMatch, len(win) - pos);
|
nice := min(d.niceMatch, len(win)-pos);
|
||||||
|
|
||||||
// If we've got a match that's good enough, only look in 1/4 the chain.
|
// If we've got a match that's good enough, only look in 1/4 the chain.
|
||||||
tries := d.maxChainLength;
|
tries := d.maxChainLength;
|
||||||
|
|
@ -183,21 +182,21 @@ func (d *deflater) findMatch(pos int, prevHead int, prevLength int, lookahead in
|
||||||
}
|
}
|
||||||
|
|
||||||
w0 := win[pos];
|
w0 := win[pos];
|
||||||
w1 := win[pos + 1];
|
w1 := win[pos+1];
|
||||||
wEnd := win[pos + length];
|
wEnd := win[pos+length];
|
||||||
minIndex := pos - (d.windowMask + 1);
|
minIndex := pos-(d.windowMask + 1);
|
||||||
|
|
||||||
for i := prevHead; tries > 0; tries-- {
|
for i := prevHead; tries > 0; tries-- {
|
||||||
if w0 == win[i] && w1 == win[i+1] && wEnd == win[i+length] {
|
if w0 == win[i] && w1 == win[i+1] && wEnd == win[i+length] {
|
||||||
// The hash function ensures that if win[i] and win[i+1] match, win[i+2] matches
|
// The hash function ensures that if win[i] and win[i+1] match, win[i+2] matches
|
||||||
|
|
||||||
n := 3;
|
n := 3;
|
||||||
for pos + n < len(win) && win[i+n] == win[pos+n] {
|
for pos+n < len(win) && win[i+n] == win[pos+n] {
|
||||||
n++;
|
n++;
|
||||||
}
|
}
|
||||||
if n > length && (n > 3 || pos-i <= 4096) {
|
if n > length && (n > 3 || pos-i <= 4096) {
|
||||||
length = n;
|
length = n;
|
||||||
offset = pos - i;
|
offset = pos-i;
|
||||||
ok = true;
|
ok = true;
|
||||||
if n >= nice {
|
if n >= nice {
|
||||||
// The match is good enough that we don't try to find a better one.
|
// The match is good enough that we don't try to find a better one.
|
||||||
|
|
@ -246,7 +245,7 @@ func (d *deflater) storedDeflate() os.Error {
|
||||||
|
|
||||||
func (d *deflater) doDeflate() (err os.Error) {
|
func (d *deflater) doDeflate() (err os.Error) {
|
||||||
// init
|
// init
|
||||||
d.windowMask = 1<<d.logWindowSize - 1;
|
d.windowMask = 1 << d.logWindowSize - 1;
|
||||||
d.hashHead = make([]int, hashSize);
|
d.hashHead = make([]int, hashSize);
|
||||||
d.hashPrev = make([]int, 1 << d.logWindowSize);
|
d.hashPrev = make([]int, 1 << d.logWindowSize);
|
||||||
d.window = make([]byte, 2 << d.logWindowSize);
|
d.window = make([]byte, 2 << d.logWindowSize);
|
||||||
|
|
@ -266,7 +265,7 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
if index, err = d.fillWindow(index); err != nil {
|
if index, err = d.fillWindow(index); err != nil {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
maxOffset := d.windowMask + 1; // (1 << logWindowSize);
|
maxOffset := d.windowMask + 1; // (1 << logWindowSize);
|
||||||
// only need to change when you refill the window
|
// only need to change when you refill the window
|
||||||
windowEnd := d.windowEnd;
|
windowEnd := d.windowEnd;
|
||||||
maxInsertIndex := windowEnd - (minMatchLength - 1);
|
maxInsertIndex := windowEnd - (minMatchLength - 1);
|
||||||
|
|
@ -274,7 +273,7 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
|
|
||||||
hash := int(0);
|
hash := int(0);
|
||||||
if index < maxInsertIndex {
|
if index < maxInsertIndex {
|
||||||
hash = int(d.window[index])<<hashShift + int(d.window[index+1]);
|
hash = int(d.window[index]) << hashShift + int(d.window[index+1]);
|
||||||
}
|
}
|
||||||
chainHead := -1;
|
chainHead := -1;
|
||||||
for {
|
for {
|
||||||
|
|
@ -298,7 +297,7 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
}
|
}
|
||||||
if index < maxInsertIndex {
|
if index < maxInsertIndex {
|
||||||
// Update the hash
|
// Update the hash
|
||||||
hash = (hash<<hashShift + int(d.window[index+2])) & hashMask;
|
hash = (hash << hashShift + int(d.window[index+2]))&hashMask;
|
||||||
chainHead = d.hashHead[hash];
|
chainHead = d.hashHead[hash];
|
||||||
d.hashPrev[index & d.windowMask] = chainHead;
|
d.hashPrev[index & d.windowMask] = chainHead;
|
||||||
d.hashHead[hash] = index;
|
d.hashHead[hash] = index;
|
||||||
|
|
@ -311,8 +310,8 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
|
|
||||||
if chainHead >= minIndex &&
|
if chainHead >= minIndex &&
|
||||||
(isFastDeflate && lookahead > minMatchLength - 1 ||
|
(isFastDeflate && lookahead > minMatchLength - 1 ||
|
||||||
!isFastDeflate && lookahead > prevLength && prevLength < lazyMatch) {
|
!isFastDeflate && lookahead > prevLength && prevLength < lazyMatch) {
|
||||||
if newLength, newOffset, ok := d.findMatch(index, chainHead, minMatchLength -1 , lookahead); ok {
|
if newLength, newOffset, ok := d.findMatch(index, chainHead, minMatchLength - 1, lookahead); ok {
|
||||||
length = newLength;
|
length = newLength;
|
||||||
offset = newOffset;
|
offset = newOffset;
|
||||||
}
|
}
|
||||||
|
|
@ -334,13 +333,13 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
if length <= l.fastSkipHashing {
|
if length <= l.fastSkipHashing {
|
||||||
var newIndex int;
|
var newIndex int;
|
||||||
if isFastDeflate {
|
if isFastDeflate {
|
||||||
newIndex = index + length;
|
newIndex = index+length;
|
||||||
} else {
|
} else {
|
||||||
newIndex = prevLength - 1;
|
newIndex = prevLength - 1;
|
||||||
}
|
}
|
||||||
for index++; index < newIndex; index++ {
|
for index++; index < newIndex; index++ {
|
||||||
if index < maxInsertIndex {
|
if index < maxInsertIndex {
|
||||||
hash = (hash<<hashShift + int(d.window[index+2])) & hashMask;
|
hash = (hash << hashShift + int(d.window[index+2]))&hashMask;
|
||||||
// Get previous value with the same hash.
|
// Get previous value with the same hash.
|
||||||
// Our chain should point to the previous value.
|
// Our chain should point to the previous value.
|
||||||
d.hashPrev[index & d.windowMask] = d.hashHead[hash];
|
d.hashPrev[index & d.windowMask] = d.hashHead[hash];
|
||||||
|
|
@ -356,7 +355,7 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
// For matches this long, we don't bother inserting each individual
|
// For matches this long, we don't bother inserting each individual
|
||||||
// item into the table.
|
// item into the table.
|
||||||
index += length;
|
index += length;
|
||||||
hash = (int(d.window[index])<<hashShift + int(d.window[index+1]));
|
hash = (int(d.window[index]) << hashShift + int(d.window[index+1]));
|
||||||
}
|
}
|
||||||
if ti == maxFlateBlockTokens {
|
if ti == maxFlateBlockTokens {
|
||||||
// The block includes the current character
|
// The block includes the current character
|
||||||
|
|
@ -367,11 +366,11 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if isFastDeflate || byteAvailable {
|
if isFastDeflate || byteAvailable {
|
||||||
i := index - 1;
|
i := index-1;
|
||||||
if isFastDeflate {
|
if isFastDeflate {
|
||||||
i = index;
|
i = index;
|
||||||
}
|
}
|
||||||
tokens[ti] = literalToken(uint32(d.window[i]) & 0xFF);
|
tokens[ti] = literalToken(uint32(d.window[i])&0xFF);
|
||||||
ti++;
|
ti++;
|
||||||
if ti == maxFlateBlockTokens {
|
if ti == maxFlateBlockTokens {
|
||||||
if err = d.writeBlock(tokens, i+1, false); err != nil {
|
if err = d.writeBlock(tokens, i+1, false); err != nil {
|
||||||
|
|
@ -389,7 +388,7 @@ func (d *deflater) doDeflate() (err os.Error) {
|
||||||
}
|
}
|
||||||
if byteAvailable {
|
if byteAvailable {
|
||||||
// There is still one pending token that needs to be flushed
|
// There is still one pending token that needs to be flushed
|
||||||
tokens[ti] = literalToken(uint32(d.window[index - 1]) & 0xFF);
|
tokens[ti] = literalToken(uint32(d.window[index-1])&0xFF);
|
||||||
ti++;
|
ti++;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -416,7 +415,7 @@ func (d *deflater) deflater(r io.Reader, w io.Writer, level int, logWindowSize u
|
||||||
case 1 <= level && level <= 9:
|
case 1 <= level && level <= 9:
|
||||||
err = d.doDeflate();
|
err = d.doDeflate();
|
||||||
default:
|
default:
|
||||||
return WrongValueError { "level", 0, 9, int32(level) };
|
return WrongValueError{"level", 0, 9, int32(level)};
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
||||||
|
|
@ -13,65 +13,66 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
type deflateTest struct {
|
type deflateTest struct {
|
||||||
in []byte;
|
in []byte;
|
||||||
level int;
|
level int;
|
||||||
out []byte;
|
out []byte;
|
||||||
}
|
}
|
||||||
|
|
||||||
type deflateInflateTest struct {
|
type deflateInflateTest struct {
|
||||||
in [] byte;
|
in []byte;
|
||||||
}
|
}
|
||||||
|
|
||||||
type reverseBitsTest struct {
|
type reverseBitsTest struct {
|
||||||
in uint16;
|
in uint16;
|
||||||
bitCount uint8;
|
bitCount uint8;
|
||||||
out uint16;
|
out uint16;
|
||||||
}
|
}
|
||||||
|
|
||||||
var deflateTests = []*deflateTest {
|
var deflateTests = []*deflateTest{
|
||||||
&deflateTest { []byte{ }, 0, []byte{ 1, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{}, 0, []byte{1, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11 }, -1, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, -1, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11 }, DefaultCompression, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, DefaultCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11 }, 4, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, 4, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||||
|
|
||||||
&deflateTest { []byte{ 0x11 }, 0, []byte { 0, 1, 0, 254, 255, 17, 1, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, 0, []byte{0, 1, 0, 254, 255, 17, 1, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x12 }, 0, []byte{ 0, 2, 0, 253, 255, 17, 18, 1, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11, 0x12}, 0, []byte{0, 2, 0, 253, 255, 17, 18, 1, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 0,
|
&deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 0,
|
||||||
[]byte{ 0, 8, 0, 247, 255, 17, 17, 17, 17, 17, 17, 17, 17, 1, 0, 0, 255, 255 } },
|
[]byte{0, 8, 0, 247, 255, 17, 17, 17, 17, 17, 17, 17, 17, 1, 0, 0, 255, 255},
|
||||||
&deflateTest { []byte{}, 1, []byte{ 1, 0, 0, 255, 255 } },
|
},
|
||||||
&deflateTest { []byte{ 0x11 }, 1, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{}, 1, []byte{1, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x12 }, 1, []byte{ 18, 20, 2, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, 1, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 1, []byte{ 18, 132, 2, 64, 0, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11, 0x12}, 1, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{}, 9, []byte{ 1, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 1, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11 }, 9, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{}, 9, []byte{1, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x12 }, 9, []byte{ 18, 20, 2, 4, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11}, 9, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||||
&deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 9, []byte{ 18, 132, 2, 64, 0, 0, 0, 255, 255 } },
|
&deflateTest{[]byte{0x11, 0x12}, 9, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
|
||||||
|
&deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 9, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
|
||||||
}
|
}
|
||||||
|
|
||||||
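
The empty-input rows above all produce the five bytes {1, 0, 0, 255, 255}: one final stored block, where 0x01 carries BFINAL=1 and BTYPE=00, followed by LEN=0x0000 and its complement NLEN=0xFFFF. A sanity check in present-day Go, outside this tree, that those bytes read back as an empty stream:

package main

import (
	"bytes"
	"compress/flate"
	"fmt"
	"io"
)

func main() {
	// {1, 0, 0, 255, 255} is a final stored block of length 0.
	r := flate.NewReader(bytes.NewReader([]byte{1, 0, 0, 255, 255}))
	defer r.Close()
	out, err := io.ReadAll(r)
	fmt.Println(len(out), err) // 0 <nil>
}
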
var deflateInflateTests = []*deflateInflateTest {
|
var deflateInflateTests = []*deflateInflateTest{
|
||||||
&deflateInflateTest { []byte{ } },
|
&deflateInflateTest{[]byte{}},
|
||||||
&deflateInflateTest { []byte{ 0x11 } },
|
&deflateInflateTest{[]byte{0x11}},
|
||||||
&deflateInflateTest { []byte{ 0x11, 0x12 } },
|
&deflateInflateTest{[]byte{0x11, 0x12}},
|
||||||
&deflateInflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 } },
|
&deflateInflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}},
|
||||||
&deflateInflateTest { []byte{ 0x11, 0x10, 0x13, 0x41, 0x21, 0x21, 0x41, 0x13, 0x87, 0x78, 0x13 } },
|
&deflateInflateTest{[]byte{0x11, 0x10, 0x13, 0x41, 0x21, 0x21, 0x41, 0x13, 0x87, 0x78, 0x13}},
|
||||||
&deflateInflateTest { getLargeDataChunk() },
|
&deflateInflateTest{getLargeDataChunk()},
|
||||||
}
|
}
|
||||||
|
|
||||||
var reverseBitsTests = []*reverseBitsTest {
|
var reverseBitsTests = []*reverseBitsTest{
|
||||||
&reverseBitsTest { 1, 1, 1 },
|
&reverseBitsTest{1, 1, 1},
|
||||||
&reverseBitsTest { 1, 2, 2 },
|
&reverseBitsTest{1, 2, 2},
|
||||||
&reverseBitsTest { 1, 3, 4 },
|
&reverseBitsTest{1, 3, 4},
|
||||||
&reverseBitsTest { 1, 4, 8 },
|
&reverseBitsTest{1, 4, 8},
|
||||||
&reverseBitsTest { 1, 5, 16 },
|
&reverseBitsTest{1, 5, 16},
|
||||||
&reverseBitsTest { 17, 5, 17 },
|
&reverseBitsTest{17, 5, 17},
|
||||||
&reverseBitsTest { 257, 9, 257 },
|
&reverseBitsTest{257, 9, 257},
|
||||||
&reverseBitsTest { 29, 5, 23 },
|
&reverseBitsTest{29, 5, 23},
|
||||||
}
|
}
|
||||||
|
|
||||||
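
The reverseBitsTests table above exercises bit-order reversal of code words. A minimal sketch of the property being tested, reversing the low bitCount bits; the name and signature here are illustrative, not flate's internal function:

package main

import "fmt"

// reverse returns v with its low bitCount bits in reverse order,
// matching rows such as {29, 5, 23}: 11101b becomes 10111b.
func reverse(v uint16, bitCount uint8) uint16 {
	var r uint16
	for i := uint8(0); i < bitCount; i++ {
		r = r<<1 | v&1
		v >>= 1
	}
	return r
}

func main() {
	fmt.Println(reverse(29, 5), reverse(1, 3), reverse(257, 9)) // 23 4 257
}
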
func getLargeDataChunk() []byte {
|
func getLargeDataChunk() []byte {
|
||||||
result := make([]byte, 100000);
|
result := make([]byte, 100000);
|
||||||
for i := range result {
|
for i := range result {
|
||||||
result[i] = byte(int64(i) * int64(i) & 0xFF);
|
result[i] = byte(int64(i)*int64(i)&0xFF);
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
@ -107,7 +108,7 @@ func testToFromWithLevel(t *testing.T, level int, input []byte, name string) os.
|
||||||
return nil;
|
return nil;
|
||||||
}
|
}
|
||||||
|
|
||||||
func testToFrom(t *testing.T, input[] byte, name string) {
|
func testToFrom(t *testing.T, input []byte, name string) {
|
||||||
for i := 0; i < 10; i++ {
|
for i := 0; i < 10; i++ {
|
||||||
testToFromWithLevel(t, i, input, name);
|
testToFromWithLevel(t, i, input, name);
|
||||||
}
|
}
|
||||||
|
|
@ -134,130 +135,130 @@ func TestDeflateInflateString(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func getEdata() string {
|
func getEdata() string {
|
||||||
return "2.718281828459045235360287471352662497757247093699959574966967627724076630353547"+
|
return "2.718281828459045235360287471352662497757247093699959574966967627724076630353547" +
|
||||||
"59457138217852516642742746639193200305992181741359662904357290033429526059563073"+
|
"59457138217852516642742746639193200305992181741359662904357290033429526059563073" +
|
||||||
"81323286279434907632338298807531952510190115738341879307021540891499348841675092"+
|
"81323286279434907632338298807531952510190115738341879307021540891499348841675092" +
|
||||||
"44761460668082264800168477411853742345442437107539077744992069551702761838606261"+
|
"44761460668082264800168477411853742345442437107539077744992069551702761838606261" +
|
||||||
"33138458300075204493382656029760673711320070932870912744374704723069697720931014"+
|
"33138458300075204493382656029760673711320070932870912744374704723069697720931014" +
|
||||||
"16928368190255151086574637721112523897844250569536967707854499699679468644549059"+
|
"16928368190255151086574637721112523897844250569536967707854499699679468644549059" +
|
||||||
"87931636889230098793127736178215424999229576351482208269895193668033182528869398"+
|
"87931636889230098793127736178215424999229576351482208269895193668033182528869398" +
|
||||||
"49646510582093923982948879332036250944311730123819706841614039701983767932068328"+
|
"49646510582093923982948879332036250944311730123819706841614039701983767932068328" +
|
||||||
"23764648042953118023287825098194558153017567173613320698112509961818815930416903"+
|
"23764648042953118023287825098194558153017567173613320698112509961818815930416903" +
|
||||||
"51598888519345807273866738589422879228499892086805825749279610484198444363463244"+
|
"51598888519345807273866738589422879228499892086805825749279610484198444363463244" +
|
||||||
"96848756023362482704197862320900216099023530436994184914631409343173814364054625"+
|
"96848756023362482704197862320900216099023530436994184914631409343173814364054625" +
|
||||||
"31520961836908887070167683964243781405927145635490613031072085103837505101157477"+
|
"31520961836908887070167683964243781405927145635490613031072085103837505101157477" +
|
||||||
"04171898610687396965521267154688957035035402123407849819334321068170121005627880"+
|
"04171898610687396965521267154688957035035402123407849819334321068170121005627880" +
|
||||||
"23519303322474501585390473041995777709350366041699732972508868769664035557071622"+
|
"23519303322474501585390473041995777709350366041699732972508868769664035557071622" +
|
||||||
"68447162560798826517871341951246652010305921236677194325278675398558944896970964"+
|
"68447162560798826517871341951246652010305921236677194325278675398558944896970964" +
|
||||||
"09754591856956380236370162112047742722836489613422516445078182442352948636372141"+
|
"09754591856956380236370162112047742722836489613422516445078182442352948636372141" +
|
||||||
"74023889344124796357437026375529444833799801612549227850925778256209262264832627"+
|
"74023889344124796357437026375529444833799801612549227850925778256209262264832627" +
|
||||||
"79333865664816277251640191059004916449982893150566047258027786318641551956532442"+
|
"79333865664816277251640191059004916449982893150566047258027786318641551956532442" +
|
||||||
"58698294695930801915298721172556347546396447910145904090586298496791287406870504"+
|
"58698294695930801915298721172556347546396447910145904090586298496791287406870504" +
|
||||||
"89585867174798546677575732056812884592054133405392200011378630094556068816674001"+
|
"89585867174798546677575732056812884592054133405392200011378630094556068816674001" +
|
||||||
"69842055804033637953764520304024322566135278369511778838638744396625322498506549"+
|
"69842055804033637953764520304024322566135278369511778838638744396625322498506549" +
|
||||||
"95886234281899707733276171783928034946501434558897071942586398772754710962953741"+
|
"95886234281899707733276171783928034946501434558897071942586398772754710962953741" +
|
||||||
"52111513683506275260232648472870392076431005958411661205452970302364725492966693"+
|
"52111513683506275260232648472870392076431005958411661205452970302364725492966693" +
|
||||||
"81151373227536450988890313602057248176585118063036442812314965507047510254465011"+
|
"81151373227536450988890313602057248176585118063036442812314965507047510254465011" +
|
||||||
"72721155519486685080036853228183152196003735625279449515828418829478761085263981"+
|
"72721155519486685080036853228183152196003735625279449515828418829478761085263981" +
|
||||||
"39559900673764829224437528718462457803619298197139914756448826260390338144182326"+
|
"39559900673764829224437528718462457803619298197139914756448826260390338144182326" +
|
||||||
"25150974827987779964373089970388867782271383605772978824125611907176639465070633"+
|
"25150974827987779964373089970388867782271383605772978824125611907176639465070633" +
|
||||||
"04527954661855096666185664709711344474016070462621568071748187784437143698821855"+
|
"04527954661855096666185664709711344474016070462621568071748187784437143698821855" +
|
||||||
"96709591025968620023537185887485696522000503117343920732113908032936344797273559"+
|
"96709591025968620023537185887485696522000503117343920732113908032936344797273559" +
|
||||||
"55277349071783793421637012050054513263835440001863239914907054797780566978533580"+
|
"55277349071783793421637012050054513263835440001863239914907054797780566978533580" +
|
||||||
"48966906295119432473099587655236812859041383241160722602998330535370876138939639"+
|
"48966906295119432473099587655236812859041383241160722602998330535370876138939639" +
|
||||||
"17795745401613722361878936526053815584158718692553860616477983402543512843961294"+
|
"17795745401613722361878936526053815584158718692553860616477983402543512843961294" +
|
||||||
"60352913325942794904337299085731580290958631382683291477116396337092400316894586"+
|
"60352913325942794904337299085731580290958631382683291477116396337092400316894586" +
|
||||||
"36060645845925126994655724839186564209752685082307544254599376917041977780085362"+
|
"36060645845925126994655724839186564209752685082307544254599376917041977780085362" +
|
||||||
"73094171016343490769642372229435236612557250881477922315197477806056967253801718"+
|
"73094171016343490769642372229435236612557250881477922315197477806056967253801718" +
|
||||||
"07763603462459278778465850656050780844211529697521890874019660906651803516501792"+
|
"07763603462459278778465850656050780844211529697521890874019660906651803516501792" +
|
||||||
"50461950136658543663271254963990854914420001457476081930221206602433009641270489"+
|
"50461950136658543663271254963990854914420001457476081930221206602433009641270489" +
|
||||||
"43903971771951806990869986066365832322787093765022601492910115171776359446020232"+
|
"43903971771951806990869986066365832322787093765022601492910115171776359446020232" +
|
||||||
"49300280401867723910288097866605651183260043688508817157238669842242201024950551"+
|
"49300280401867723910288097866605651183260043688508817157238669842242201024950551" +
|
||||||
"88169480322100251542649463981287367765892768816359831247788652014117411091360116"+
|
"88169480322100251542649463981287367765892768816359831247788652014117411091360116" +
|
||||||
"49950766290779436460058519419985601626479076153210387275571269925182756879893027"+
|
"49950766290779436460058519419985601626479076153210387275571269925182756879893027" +
|
||||||
"61761146162549356495903798045838182323368612016243736569846703785853305275833337"+
|
"61761146162549356495903798045838182323368612016243736569846703785853305275833337" +
|
||||||
"93990752166069238053369887956513728559388349989470741618155012539706464817194670"+
|
"93990752166069238053369887956513728559388349989470741618155012539706464817194670" +
|
||||||
"83481972144888987906765037959036696724949925452790337296361626589760394985767413"+
|
"83481972144888987906765037959036696724949925452790337296361626589760394985767413" +
|
||||||
"97359441023744329709355477982629614591442936451428617158587339746791897571211956"+
|
"97359441023744329709355477982629614591442936451428617158587339746791897571211956" +
|
||||||
"18738578364475844842355558105002561149239151889309946342841393608038309166281881"+
|
"18738578364475844842355558105002561149239151889309946342841393608038309166281881" +
|
||||||
"15037152849670597416256282360921680751501777253874025642534708790891372917228286"+
|
"15037152849670597416256282360921680751501777253874025642534708790891372917228286" +
|
||||||
"11515915683725241630772254406337875931059826760944203261924285317018781772960235"+
|
"11515915683725241630772254406337875931059826760944203261924285317018781772960235" +
|
||||||
"41306067213604600038966109364709514141718577701418060644363681546444005331608778"+
|
"41306067213604600038966109364709514141718577701418060644363681546444005331608778" +
|
||||||
"31431744408119494229755993140118886833148328027065538330046932901157441475631399"+
|
"31431744408119494229755993140118886833148328027065538330046932901157441475631399" +
|
||||||
"97221703804617092894579096271662260740718749975359212756084414737823303270330168"+
|
"97221703804617092894579096271662260740718749975359212756084414737823303270330168" +
|
||||||
"23719364800217328573493594756433412994302485023573221459784328264142168487872167"+
|
"23719364800217328573493594756433412994302485023573221459784328264142168487872167" +
|
||||||
"33670106150942434569844018733128101079451272237378861260581656680537143961278887"+
|
"33670106150942434569844018733128101079451272237378861260581656680537143961278887" +
|
||||||
"32527373890392890506865324138062796025930387727697783792868409325365880733988457"+
|
"32527373890392890506865324138062796025930387727697783792868409325365880733988457" +
|
||||||
"21874602100531148335132385004782716937621800490479559795929059165547050577751430"+
|
"21874602100531148335132385004782716937621800490479559795929059165547050577751430" +
|
||||||
"81751126989851884087185640260353055837378324229241856256442550226721559802740126"+
|
"81751126989851884087185640260353055837378324229241856256442550226721559802740126" +
|
||||||
"17971928047139600689163828665277009752767069777036439260224372841840883251848770"+
|
"17971928047139600689163828665277009752767069777036439260224372841840883251848770" +
|
||||||
"47263844037953016690546593746161932384036389313136432713768884102681121989127522"+
|
"47263844037953016690546593746161932384036389313136432713768884102681121989127522" +
|
||||||
"30562567562547017250863497653672886059667527408686274079128565769963137897530346"+
|
"30562567562547017250863497653672886059667527408686274079128565769963137897530346" +
|
||||||
"60616669804218267724560530660773899624218340859882071864682623215080288286359746"+
|
"60616669804218267724560530660773899624218340859882071864682623215080288286359746" +
|
||||||
"83965435885668550377313129658797581050121491620765676995065971534476347032085321"+
|
"83965435885668550377313129658797581050121491620765676995065971534476347032085321" +
|
||||||
"56036748286083786568030730626576334697742956346437167093971930608769634953288468"+
|
"56036748286083786568030730626576334697742956346437167093971930608769634953288468" +
|
||||||
"33613038829431040800296873869117066666146800015121143442256023874474325250769387"+
|
"33613038829431040800296873869117066666146800015121143442256023874474325250769387" +
|
||||||
"07777519329994213727721125884360871583483562696166198057252661220679754062106208"+
|
"07777519329994213727721125884360871583483562696166198057252661220679754062106208" +
|
||||||
"06498829184543953015299820925030054982570433905535701686531205264956148572492573"+
|
"06498829184543953015299820925030054982570433905535701686531205264956148572492573" +
|
||||||
"86206917403695213533732531666345466588597286659451136441370331393672118569553952"+
|
"86206917403695213533732531666345466588597286659451136441370331393672118569553952" +
|
||||||
"10845840724432383558606310680696492485123263269951460359603729725319836842336390"+
|
"10845840724432383558606310680696492485123263269951460359603729725319836842336390" +
|
||||||
"46321367101161928217111502828016044880588023820319814930963695967358327420249882"+
|
"46321367101161928217111502828016044880588023820319814930963695967358327420249882" +
|
||||||
"45684941273860566491352526706046234450549227581151709314921879592718001940968866"+
|
"45684941273860566491352526706046234450549227581151709314921879592718001940968866" +
|
||||||
"98683703730220047531433818109270803001720593553052070070607223399946399057131158"+
|
"98683703730220047531433818109270803001720593553052070070607223399946399057131158" +
|
||||||
"70996357773590271962850611465148375262095653467132900259943976631145459026858989"+
|
"70996357773590271962850611465148375262095653467132900259943976631145459026858989" +
|
||||||
"79115837093419370441155121920117164880566945938131183843765620627846310490346293"+
|
"79115837093419370441155121920117164880566945938131183843765620627846310490346293" +
|
||||||
"95002945834116482411496975832601180073169943739350696629571241027323913874175492"+
|
"95002945834116482411496975832601180073169943739350696629571241027323913874175492" +
|
||||||
"30718624545432220395527352952402459038057445028922468862853365422138157221311632"+
|
"30718624545432220395527352952402459038057445028922468862853365422138157221311632" +
|
||||||
"88112052146489805180092024719391710555390113943316681515828843687606961102505171"+
|
"88112052146489805180092024719391710555390113943316681515828843687606961102505171" +
|
||||||
"00739276238555338627255353883096067164466237092264680967125406186950214317621166"+
|
"00739276238555338627255353883096067164466237092264680967125406186950214317621166" +
|
||||||
"81400975952814939072226011126811531083873176173232352636058381731510345957365382"+
|
"81400975952814939072226011126811531083873176173232352636058381731510345957365382" +
|
||||||
"23534992935822836851007810884634349983518404451704270189381994243410090575376257"+
|
"23534992935822836851007810884634349983518404451704270189381994243410090575376257" +
|
||||||
"76757111809008816418331920196262341628816652137471732547772778348877436651882875"+
|
"76757111809008816418331920196262341628816652137471732547772778348877436651882875" +
|
||||||
"21566857195063719365653903894493664217640031215278702223664636357555035655769488"+
|
"21566857195063719365653903894493664217640031215278702223664636357555035655769488" +
|
||||||
"86549500270853923617105502131147413744106134445544192101336172996285694899193369"+
|
"86549500270853923617105502131147413744106134445544192101336172996285694899193369" +
|
||||||
"18472947858072915608851039678195942983318648075608367955149663644896559294818785"+
|
"18472947858072915608851039678195942983318648075608367955149663644896559294818785" +
|
||||||
"17840387733262470519450504198477420141839477312028158868457072905440575106012852"+
|
"17840387733262470519450504198477420141839477312028158868457072905440575106012852" +
|
||||||
"58056594703046836344592652552137008068752009593453607316226118728173928074623094"+
|
"58056594703046836344592652552137008068752009593453607316226118728173928074623094" +
|
||||||
"68536782310609792159936001994623799343421068781349734695924646975250624695861690"+
|
"68536782310609792159936001994623799343421068781349734695924646975250624695861690" +
|
||||||
"91785739765951993929939955675427146549104568607020990126068187049841780791739240"+
|
"91785739765951993929939955675427146549104568607020990126068187049841780791739240" +
|
||||||
"71945996323060254707901774527513186809982284730860766536866855516467702911336827"+
|
"71945996323060254707901774527513186809982284730860766536866855516467702911336827" +
|
||||||
"56310722334672611370549079536583453863719623585631261838715677411873852772292259"+
|
"56310722334672611370549079536583453863719623585631261838715677411873852772292259" +
|
||||||
"47433737856955384562468010139057278710165129666367644518724656537304024436841408"+
|
"47433737856955384562468010139057278710165129666367644518724656537304024436841408" +
|
||||||
"14488732957847348490003019477888020460324660842875351848364959195082888323206522"+
|
"14488732957847348490003019477888020460324660842875351848364959195082888323206522" +
|
||||||
"12810419044804724794929134228495197002260131043006241071797150279343326340799596"+
|
"12810419044804724794929134228495197002260131043006241071797150279343326340799596" +
|
||||||
"05314460532304885289729176598760166678119379323724538572096075822771784833616135"+
|
"05314460532304885289729176598760166678119379323724538572096075822771784833616135" +
|
||||||
"82612896226118129455927462767137794487586753657544861407611931125958512655759734"+
|
"82612896226118129455927462767137794487586753657544861407611931125958512655759734" +
|
||||||
"57301533364263076798544338576171533346232527057200530398828949903425956623297578"+
|
"57301533364263076798544338576171533346232527057200530398828949903425956623297578" +
|
||||||
"24887350292591668258944568946559926584547626945287805165017206747854178879822768"+
|
"24887350292591668258944568946559926584547626945287805165017206747854178879822768" +
|
||||||
"06536650641910973434528878338621726156269582654478205672987756426325321594294418"+
|
"06536650641910973434528878338621726156269582654478205672987756426325321594294418" +
|
||||||
"03994321700009054265076309558846589517170914760743713689331946909098190450129030"+
|
"03994321700009054265076309558846589517170914760743713689331946909098190450129030" +
|
||||||
"70995662266203031826493657336984195557769637876249188528656866076005660256054457"+
|
"70995662266203031826493657336984195557769637876249188528656866076005660256054457" +
|
||||||
"11337286840205574416030837052312242587223438854123179481388550075689381124935386"+
|
"11337286840205574416030837052312242587223438854123179481388550075689381124935386" +
|
||||||
"31863528708379984569261998179452336408742959118074745341955142035172618420084550"+
|
"31863528708379984569261998179452336408742959118074745341955142035172618420084550" +
|
||||||
"91708456823682008977394558426792142734775608796442792027083121501564063413416171"+
|
"91708456823682008977394558426792142734775608796442792027083121501564063413416171" +
|
||||||
"66448069815483764491573900121217041547872591998943825364950514771379399147205219"+
|
"66448069815483764491573900121217041547872591998943825364950514771379399147205219" +
|
||||||
"52907939613762110723849429061635760459623125350606853765142311534966568371511660"+
|
"52907939613762110723849429061635760459623125350606853765142311534966568371511660" +
|
||||||
"42207963944666211632551577290709784731562782775987881364919512574833287937715714"+
|
"42207963944666211632551577290709784731562782775987881364919512574833287937715714" +
|
||||||
"59091064841642678309949723674420175862269402159407924480541255360431317992696739"+
|
"59091064841642678309949723674420175862269402159407924480541255360431317992696739" +
|
||||||
"15754241929660731239376354213923061787675395871143610408940996608947141834069836"+
|
"15754241929660731239376354213923061787675395871143610408940996608947141834069836" +
|
||||||
"29936753626215452472984642137528910798843813060955526227208375186298370667872244"+
|
"29936753626215452472984642137528910798843813060955526227208375186298370667872244" +
|
||||||
"30195793793786072107254277289071732854874374355781966511716618330881129120245204"+
|
"30195793793786072107254277289071732854874374355781966511716618330881129120245204" +
|
||||||
"04868220007234403502544820283425418788465360259150644527165770004452109773558589"+
|
"04868220007234403502544820283425418788465360259150644527165770004452109773558589" +
|
||||||
"76226554849416217149895323834216001140629507184904277892585527430352213968356790"+
|
"76226554849416217149895323834216001140629507184904277892585527430352213968356790" +
|
||||||
"18076406042138307308774460170842688272261177180842664333651780002171903449234264"+
|
"18076406042138307308774460170842688272261177180842664333651780002171903449234264" +
|
||||||
"26629226145600433738386833555534345300426481847398921562708609565062934040526494"+
|
"26629226145600433738386833555534345300426481847398921562708609565062934040526494" +
|
||||||
"32442614456659212912256488935696550091543064261342526684725949143142393988454324"+
|
"32442614456659212912256488935696550091543064261342526684725949143142393988454324" +
|
||||||
"86327461842846655985332312210466259890141712103446084271616619001257195870793217"+
|
"86327461842846655985332312210466259890141712103446084271616619001257195870793217" +
|
||||||
"56969854401339762209674945418540711844643394699016269835160784892451405894094639"+
|
"56969854401339762209674945418540711844643394699016269835160784892451405894094639" +
|
||||||
"52678073545797003070511636825194877011897640028276484141605872061841852971891540"+
|
"52678073545797003070511636825194877011897640028276484141605872061841852971891540" +
|
||||||
"19688253289309149665345753571427318482016384644832499037886069008072709327673127"+
|
"19688253289309149665345753571427318482016384644832499037886069008072709327673127" +
|
||||||
"58196656394114896171683298045513972950668760474091542042842999354102582911350224"+
|
"58196656394114896171683298045513972950668760474091542042842999354102582911350224" +
|
||||||
"16907694316685742425225090269390348148564513030699251995904363840284292674125734"+
|
"16907694316685742425225090269390348148564513030699251995904363840284292674125734" +
|
||||||
"22447765584177886171737265462085498294498946787350929581652632072258992368768457"+
|
"22447765584177886171737265462085498294498946787350929581652632072258992368768457" +
|
||||||
"01782303809656788311228930580914057261086588484587310165815116753332767488701482"+
|
"01782303809656788311228930580914057261086588484587310165815116753332767488701482" +
|
||||||
"91674197015125597825727074064318086014281490241467804723275976842696339357735429"+
|
"91674197015125597825727074064318086014281490241467804723275976842696339357735429" +
|
||||||
"30186739439716388611764209004068663398856841681003872389214483176070116684503887"+
|
"30186739439716388611764209004068663398856841681003872389214483176070116684503887" +
|
||||||
"21236436704331409115573328018297798873659091665961240202177855885487617616198937"+
|
"21236436704331409115573328018297798873659091665961240202177855885487617616198937" +
|
||||||
"07943800566633648843650891448055710397652146960276625835990519870423001794655367"+
|
"07943800566633648843650891448055710397652146960276625835990519870423001794655367" +
|
||||||
"9";
|
"9";
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -56,8 +56,8 @@ var initDecoderTests = []*InitDecoderTest{
|
||||||
[]int{3, 5, 2, 4, 3, 5, 5, 4, 4, 3, 4, 5},
|
[]int{3, 5, 2, 4, 3, 5, 5, 4, 4, 3, 4, 5},
|
||||||
huffmanDecoder{
|
huffmanDecoder{
|
||||||
2, 5,
|
2, 5,
|
||||||
[maxCodeLen+1]int{2: 0, 4, 13, 31},
|
[maxCodeLen + 1]int{2: 0, 4, 13, 31},
|
||||||
[maxCodeLen+1]int{2: 0, 1, 6, 20},
|
[maxCodeLen + 1]int{2: 0, 1, 6, 20},
|
||||||
// Paper used different code assignment:
|
// Paper used different code assignment:
|
||||||
// 2, 9, 4, 0, 10, 8, 3, 7, 1, 5, 11, 6
|
// 2, 9, 4, 0, 10, 8, 3, 7, 1, 5, 11, 6
|
||||||
// Reordered here so that codes of same length
|
// Reordered here so that codes of same length
|
||||||
|
|
@ -72,8 +72,8 @@ var initDecoderTests = []*InitDecoderTest{
|
||||||
[]int{2, 1, 3, 3},
|
[]int{2, 1, 3, 3},
|
||||||
huffmanDecoder{
|
huffmanDecoder{
|
||||||
1, 3,
|
1, 3,
|
||||||
[maxCodeLen+1]int{1: 0, 2, 7},
|
[maxCodeLen + 1]int{1: 0, 2, 7},
|
||||||
[maxCodeLen+1]int{1: 0, 1, 4},
|
[maxCodeLen + 1]int{1: 0, 1, 4},
|
||||||
[]int{1, 0, 2, 3},
|
[]int{1, 0, 2, 3},
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -84,8 +84,8 @@ var initDecoderTests = []*InitDecoderTest{
|
||||||
[]int{3, 3, 3, 3, 3, 2, 4, 4},
|
[]int{3, 3, 3, 3, 3, 2, 4, 4},
|
||||||
huffmanDecoder{
|
huffmanDecoder{
|
||||||
2, 4,
|
2, 4,
|
||||||
[maxCodeLen+1]int{2: 0, 6, 15},
|
[maxCodeLen + 1]int{2: 0, 6, 15},
|
||||||
[maxCodeLen+1]int{2: 0, 1, 8},
|
[maxCodeLen + 1]int{2: 0, 1, 8},
|
||||||
[]int{5, 0, 1, 2, 3, 4, 6, 7},
|
[]int{5, 0, 1, 2, 3, 4, 6, 7},
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
|
||||||
|
|
@ -13,48 +13,47 @@ import (
|
||||||
|
|
||||||
const (
// The largest offset code.
offsetCodeCount = 30;

// The largest offset code in the extensions.
extendedOffsetCodeCount = 42;

// The special code used to mark the end of a block.
endBlockMarker = 256;

// The first length code.
lengthCodesStart = 257;

// The number of codegen codes.
codegenCodeCount = 19;

badCode = 255;
)

// The number of extra bits needed by length code X - LENGTH_CODES_START.
var lengthExtraBits = []int8 {
var lengthExtraBits = []int8{
/* 257 */ 0, 0, 0,
/* 257 */0, 0, 0,
/* 260 */ 0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
/* 260 */0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
/* 270 */ 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
/* 270 */2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
/* 280 */ 4, 5, 5, 5, 5, 0,
/* 280 */4, 5, 5, 5, 5, 0,
}

// The length indicated by length code X - LENGTH_CODES_START.
var lengthBase = []uint32 {
var lengthBase = []uint32{
0, 1, 2, 3, 4, 5, 6, 7, 8, 10,
12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 255
64, 80, 96, 112, 128, 160, 192, 224, 255,
}

// offset code word extra bits.
var offsetExtraBits = []int8 {
var offsetExtraBits = []int8{
0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
9, 9, 10, 10, 11, 11, 12, 12, 13, 13,
/* extended window */
14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20,
}

var offsetBase = []uint32 {
var offsetBase = []uint32{
/* normal deflate */
0x000000, 0x000001, 0x000002, 0x000003, 0x000004,
0x000006, 0x000008, 0x00000c, 0x000010, 0x000018,
@ -66,37 +65,35 @@ var offsetBase = []uint32 {
|
||||||
/* extended window */
|
/* extended window */
|
||||||
0x008000, 0x00c000, 0x010000, 0x018000, 0x020000,
|
0x008000, 0x00c000, 0x010000, 0x018000, 0x020000,
|
||||||
0x030000, 0x040000, 0x060000, 0x080000, 0x0c0000,
|
0x030000, 0x040000, 0x060000, 0x080000, 0x0c0000,
|
||||||
0x100000, 0x180000, 0x200000, 0x300000
|
0x100000, 0x180000, 0x200000, 0x300000,
|
||||||
}
|
}
|
||||||
|
|
||||||
// The odd order in which the codegen code sizes are written.
var codegenOrder = []uint32 {
var codegenOrder = []uint32{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
}
||||||
|
|
||||||
type huffmanBitWriter struct {
|
type huffmanBitWriter struct {
|
||||||
w io.Writer;
|
w io.Writer;
|
||||||
// Data waiting to be written is bytes[0:nbytes]
|
// Data waiting to be written is bytes[0:nbytes]
|
||||||
// and then the low nbits of bits.
|
// and then the low nbits of bits.
|
||||||
bits uint32;
|
bits uint32;
|
||||||
nbits uint32;
|
nbits uint32;
|
||||||
bytes [64]byte;
|
bytes [64]byte;
|
||||||
nbytes int;
|
nbytes int;
|
||||||
literalFreq []int32;
|
literalFreq []int32;
|
||||||
offsetFreq []int32;
|
offsetFreq []int32;
|
||||||
codegen []uint8;
|
codegen []uint8;
|
||||||
codegenFreq []int32;
|
codegenFreq []int32;
|
||||||
literalEncoding *huffmanEncoder;
|
literalEncoding *huffmanEncoder;
|
||||||
offsetEncoding *huffmanEncoder;
|
offsetEncoding *huffmanEncoder;
|
||||||
codegenEncoding *huffmanEncoder;
|
codegenEncoding *huffmanEncoder;
|
||||||
err os.Error;
|
err os.Error;
|
||||||
}
|
}
|
||||||
|
|
||||||
type WrongValueError struct {
|
type WrongValueError struct {
|
||||||
name string;
|
name string;
|
||||||
from int32;
|
from int32;
|
||||||
to int32;
|
to int32;
|
||||||
value int32;
|
value int32;
|
||||||
}
|
}
|
||||||
|
|
||||||
func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter {
|
func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter {
|
||||||
|
|
@ -175,7 +172,7 @@ func (w *huffmanBitWriter) writeBytes(bytes []byte) {
|
||||||
n++;
|
n++;
|
||||||
}
|
}
|
||||||
if w.nbits != 0 {
|
if w.nbits != 0 {
|
||||||
w.err = InternalError("writeBytes with unfinished bits");
|
w.err = InternalError("writeBytes with unfinished bits");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if n != 0 {
|
if n != 0 {
|
||||||
|
|
@ -205,7 +202,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
|
||||||
// a copy of the frequencies, and as the place where we put the result.
|
// a copy of the frequencies, and as the place where we put the result.
|
||||||
// This is fine because the output is always shorter than the input used
|
// This is fine because the output is always shorter than the input used
|
||||||
// so far.
|
// so far.
|
||||||
codegen := w.codegen; // cache
|
codegen := w.codegen; // cache
|
||||||
// Copy the concatenated code sizes to codegen. Put a marker at the end.
|
// Copy the concatenated code sizes to codegen. Put a marker at the end.
|
||||||
copyUint8s(codegen[0 : numLiterals], w.literalEncoding.codeBits);
|
copyUint8s(codegen[0 : numLiterals], w.literalEncoding.codeBits);
|
||||||
copyUint8s(codegen[numLiterals : numLiterals + numOffsets], w.offsetEncoding.codeBits);
|
copyUint8s(codegen[numLiterals : numLiterals + numOffsets], w.offsetEncoding.codeBits);
|
||||||
|
|
@ -232,7 +229,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
|
||||||
n := min(count, 6);
|
n := min(count, 6);
|
||||||
codegen[outIndex] = 16;
|
codegen[outIndex] = 16;
|
||||||
outIndex++;
|
outIndex++;
|
||||||
codegen[outIndex] = uint8(n - 3);
|
codegen[outIndex] = uint8(n-3);
|
||||||
outIndex++;
|
outIndex++;
|
||||||
w.codegenFreq[16]++;
|
w.codegenFreq[16]++;
|
||||||
count -= n;
|
count -= n;
|
||||||
|
|
@ -242,7 +239,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
|
||||||
n := min(count, 138);
|
n := min(count, 138);
|
||||||
codegen[outIndex] = 18;
|
codegen[outIndex] = 18;
|
||||||
outIndex++;
|
outIndex++;
|
||||||
codegen[outIndex] = uint8(n - 11);
|
codegen[outIndex] = uint8(n-11);
|
||||||
outIndex++;
|
outIndex++;
|
||||||
w.codegenFreq[18]++;
|
w.codegenFreq[18]++;
|
||||||
count -= n;
|
count -= n;
|
||||||
|
|
@ -251,7 +248,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
|
||||||
// count >= 3 && count <= 10
|
// count >= 3 && count <= 10
|
||||||
codegen[outIndex] = 17;
|
codegen[outIndex] = 17;
|
||||||
outIndex++;
|
outIndex++;
|
||||||
codegen[outIndex] = uint8(count - 3);
|
codegen[outIndex] = uint8(count-3);
|
||||||
outIndex++;
|
outIndex++;
|
||||||
w.codegenFreq[17]++;
|
w.codegenFreq[17]++;
|
||||||
count = 0;
|
count = 0;
|
||||||
|
|
@ -295,8 +292,8 @@ func (w *huffmanBitWriter) writeDynamicHeader(numLiterals int, numOffsets int, n
|
||||||
w.writeBits(int32(numLiterals - 257), 5);
|
w.writeBits(int32(numLiterals - 257), 5);
|
||||||
if numOffsets > offsetCodeCount {
|
if numOffsets > offsetCodeCount {
|
||||||
// Extended version of deflater
|
// Extended version of deflater
|
||||||
w.writeBits(int32(offsetCodeCount + ((numOffsets - (1 + offsetCodeCount)) >> 3)), 5);
|
w.writeBits(int32(offsetCodeCount + ((numOffsets - (1 + offsetCodeCount))>>3)), 5);
|
||||||
w.writeBits(int32((numOffsets - (1 + offsetCodeCount)) & 0x7), 3);
|
w.writeBits(int32((numOffsets - (1 + offsetCodeCount))&0x7), 3);
|
||||||
} else {
|
} else {
|
||||||
w.writeBits(int32(numOffsets - 1), 5);
|
w.writeBits(int32(numOffsets - 1), 5);
|
||||||
}
|
}
|
||||||
|
|
@ -368,10 +365,10 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
fillInt32s(w.offsetFreq, 0);
|
fillInt32s(w.offsetFreq, 0);
|
||||||
|
|
||||||
n := len(tokens);
|
n := len(tokens);
|
||||||
tokens = tokens[0:n+1];
|
tokens = tokens[0 : n+1];
|
||||||
tokens[n] = endBlockMarker;
|
tokens[n] = endBlockMarker;
|
||||||
|
|
||||||
totalLength := -1; // Subtract 1 for endBlock.
|
totalLength := -1; // Subtract 1 for endBlock.
|
||||||
for _, t := range tokens {
|
for _, t := range tokens {
|
||||||
switch t.typ() {
|
switch t.typ() {
|
||||||
case literalType:
|
case literalType:
|
||||||
|
|
@ -381,7 +378,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
case matchType:
|
case matchType:
|
||||||
length := t.length();
|
length := t.length();
|
||||||
offset := t.offset();
|
offset := t.offset();
|
||||||
totalLength += int(length + 3);
|
totalLength += int(length+3);
|
||||||
w.literalFreq[lengthCodesStart + lengthCode(length)]++;
|
w.literalFreq[lengthCodesStart + lengthCode(length)]++;
|
||||||
w.offsetFreq[offsetCode(offset)]++;
|
w.offsetFreq[offsetCode(offset)]++;
|
||||||
break;
|
break;
|
||||||
|
|
@ -407,18 +404,18 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
var extraBits int64;
|
var extraBits int64;
|
||||||
var storedSize int64;
|
var storedSize int64;
|
||||||
if storedBytes <= maxStoreBlockSize && input != nil {
|
if storedBytes <= maxStoreBlockSize && input != nil {
|
||||||
storedSize = int64((storedBytes + 5) * 8);
|
storedSize = int64((storedBytes + 5)*8);
|
||||||
// We only bother calculating the costs of the extra bits required by
|
// We only bother calculating the costs of the extra bits required by
|
||||||
// the length of offset fields (which will be the same for both fixed
|
// the length of offset fields (which will be the same for both fixed
|
||||||
// and dynamic encoding), if we need to compare those two encodings
|
// and dynamic encoding), if we need to compare those two encodings
|
||||||
// against stored encoding.
|
// against stored encoding.
|
||||||
for lengthCode := lengthCodesStart + 8; lengthCode < numLiterals; lengthCode++ {
|
for lengthCode := lengthCodesStart + 8; lengthCode < numLiterals; lengthCode++ {
|
||||||
// First eight length codes have extra size = 0.
|
// First eight length codes have extra size = 0.
|
||||||
extraBits += int64(w.literalFreq[lengthCode]) * int64(lengthExtraBits[lengthCode - lengthCodesStart]);
|
extraBits += int64(w.literalFreq[lengthCode])*int64(lengthExtraBits[lengthCode - lengthCodesStart]);
|
||||||
}
|
}
|
||||||
for offsetCode := 4; offsetCode < numOffsets; offsetCode++ {
|
for offsetCode := 4; offsetCode < numOffsets; offsetCode++ {
|
||||||
// First four offset codes have extra size = 0.
|
// First four offset codes have extra size = 0.
|
||||||
extraBits += int64(w.offsetFreq[offsetCode]) * int64(offsetExtraBits[offsetCode]);
|
extraBits += int64(w.offsetFreq[offsetCode])*int64(offsetExtraBits[offsetCode]);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
storedSize = math.MaxInt32;
|
storedSize = math.MaxInt32;
|
||||||
|
|
@ -445,7 +442,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
if numOffsets > offsetCodeCount {
|
if numOffsets > offsetCodeCount {
|
||||||
extensionSummand = 3;
|
extensionSummand = 3;
|
||||||
}
|
}
|
||||||
dynamicHeader := int64(3 + 5 + 5 + 4 + (3 * numCodegens)) +
|
dynamicHeader := int64(3+5+5+4+(3 * numCodegens)) +
|
||||||
// Following line is an extension.
|
// Following line is an extension.
|
||||||
int64(extensionSummand) +
|
int64(extensionSummand) +
|
||||||
w.codegenEncoding.bitLength(w.codegenFreq) +
|
w.codegenEncoding.bitLength(w.codegenFreq) +
|
||||||
|
|
@ -459,7 +456,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
|
|
||||||
if storedSize < fixedSize && storedSize < dynamicSize {
|
if storedSize < fixedSize && storedSize < dynamicSize {
|
||||||
w.writeStoredHeader(storedBytes, eof);
|
w.writeStoredHeader(storedBytes, eof);
|
||||||
w.writeBytes(input[0:storedBytes]);
|
w.writeBytes(input[0 : storedBytes]);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
var literalEncoding *huffmanEncoder;
|
var literalEncoding *huffmanEncoder;
|
||||||
|
|
@ -507,4 +504,3 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -66,10 +66,10 @@ type huffmanDecoder struct {
|
||||||
// limit[i] = largest code word of length i
|
// limit[i] = largest code word of length i
|
||||||
// Given code v of length n,
|
// Given code v of length n,
|
||||||
// need more bits if v > limit[n].
|
// need more bits if v > limit[n].
|
||||||
limit [maxCodeLen+1]int;
|
limit [maxCodeLen + 1]int;
|
||||||
|
|
||||||
// base[i] = smallest code word of length i - seq number
|
// base[i] = smallest code word of length i - seq number
|
||||||
base [maxCodeLen+1]int;
|
base [maxCodeLen + 1]int;
|
||||||
|
|
||||||
// codes[seq number] = output code.
|
// codes[seq number] = output code.
|
||||||
// Given code v of length n, value is
|
// Given code v of length n, value is
|
||||||
|
|
@ -83,7 +83,7 @@ func (h *huffmanDecoder) init(bits []int) bool {
|
||||||
|
|
||||||
// Count number of codes of each length,
|
// Count number of codes of each length,
|
||||||
// compute min and max length.
|
// compute min and max length.
|
||||||
var count [maxCodeLen+1]int;
|
var count [maxCodeLen + 1]int;
|
||||||
var min, max int;
|
var min, max int;
|
||||||
for _, n := range bits {
|
for _, n := range bits {
|
||||||
if n == 0 {
|
if n == 0 {
|
||||||
|
|
@ -142,8 +142,8 @@ func (h *huffmanDecoder) init(bits []int) bool {
|
||||||
// See RFC 1951, section 3.2.6.
|
// See RFC 1951, section 3.2.6.
|
||||||
var fixedHuffmanDecoder = huffmanDecoder{
|
var fixedHuffmanDecoder = huffmanDecoder{
|
||||||
7, 9,
|
7, 9,
|
||||||
[maxCodeLen+1]int{7: 23, 199, 511},
|
[maxCodeLen + 1]int{7: 23, 199, 511},
|
||||||
[maxCodeLen+1]int{7: 0, 24, 224},
|
[maxCodeLen + 1]int{7: 0, 24, 224},
|
||||||
[]int{
|
[]int{
|
||||||
// length 7: 256-279
|
// length 7: 256-279
|
||||||
256, 257, 258, 259, 260, 261, 262,
|
256, 257, 258, 259, 260, 261, 262,
|
||||||
|
|
@ -271,11 +271,11 @@ func (f *inflater) readHuffman() os.Error {
|
||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
nlit := int(f.b & 0x1F) + 257;
|
nlit := int(f.b & 0x1F)+257;
|
||||||
f.b >>= 5;
|
f.b >>= 5;
|
||||||
ndist := int(f.b & 0x1F) + 1;
|
ndist := int(f.b & 0x1F)+1;
|
||||||
f.b >>= 5;
|
f.b >>= 5;
|
||||||
nclen := int(f.b & 0xF) + 4;
|
nclen := int(f.b & 0xF)+4;
|
||||||
f.b >>= 4;
|
f.b >>= 4;
|
||||||
f.nb -= 5+5+4;
|
f.nb -= 5+5+4;
|
||||||
|
|
||||||
|
|
@ -437,7 +437,7 @@ func (f *inflater) decodeBlock(hl, hd *huffmanDecoder) os.Error {
|
||||||
case dist >= 30:
|
case dist >= 30:
|
||||||
return CorruptInputError(f.roffset);
|
return CorruptInputError(f.roffset);
|
||||||
default:
|
default:
|
||||||
nb := uint(dist-2) >> 1;
|
nb := uint(dist-2)>>1;
|
||||||
// have 1 bit in bottom of dist, need nb more.
|
// have 1 bit in bottom of dist, need nb more.
|
||||||
extra := (dist&1)<<nb;
|
extra := (dist&1)<<nb;
|
||||||
for f.nb < nb {
|
for f.nb < nb {
|
||||||
|
|
@ -495,8 +495,8 @@ func (f *inflater) dataBlock() os.Error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return &ReadError{f.roffset, err};
|
return &ReadError{f.roffset, err};
|
||||||
}
|
}
|
||||||
n := int(f.buf[0]) | int(f.buf[1]) << 8;
|
n := int(f.buf[0]) | int(f.buf[1])<<8;
|
||||||
nn := int(f.buf[2]) | int(f.buf[3]) << 8;
|
nn := int(f.buf[2]) | int(f.buf[3])<<8;
|
||||||
if uint16(nn) != uint16(^n) {
|
if uint16(nn) != uint16(^n) {
|
||||||
return CorruptInputError(f.roffset);
|
return CorruptInputError(f.roffset);
|
||||||
}
|
}
|
||||||
@ -44,5 +44,5 @@ func reverseUint16(v uint16) uint16 {
|
||||||
}
|
}
|
||||||
|
|
||||||
func reverseBits(number uint16, bitLength byte) uint16 {
|
func reverseBits(number uint16, bitLength byte) uint16 {
|
||||||
return reverseUint16(number << uint8(16-bitLength));
|
return reverseUint16(number<<uint8(16 - bitLength));
|
||||||
}
|
}
|
||||||
@ -9,7 +9,7 @@ const (
|
||||||
// 8 bits: xlength = length - MIN_MATCH_LENGTH
|
// 8 bits: xlength = length - MIN_MATCH_LENGTH
|
||||||
// 22 bits xoffset = offset - MIN_OFFSET_SIZE, or literal
|
// 22 bits xoffset = offset - MIN_OFFSET_SIZE, or literal
|
||||||
lengthShift = 22;
|
lengthShift = 22;
|
||||||
offsetMask = 1<<lengthShift - 1;
|
offsetMask = 1 << lengthShift - 1;
|
||||||
typeMask = 3<<30;
|
typeMask = 3<<30;
|
||||||
literalType = 0<<30;
|
literalType = 0<<30;
|
||||||
matchType = 1<<30;
|
matchType = 1<<30;
|
||||||
|
|
@ -69,12 +69,12 @@ type token uint32
|
||||||
|
|
||||||
// Convert a literal into a literal token.
|
// Convert a literal into a literal token.
|
||||||
func literalToken(literal uint32) token {
|
func literalToken(literal uint32) token {
|
||||||
return token(literalType+literal);
|
return token(literalType + literal);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert a < xlength, xoffset > pair into a match token.
|
// Convert a < xlength, xoffset > pair into a match token.
|
||||||
func matchToken(xlength uint32, xoffset uint32) token {
|
func matchToken(xlength uint32, xoffset uint32) token {
|
||||||
return token(matchType + xlength<<lengthShift + xoffset);
|
return token(matchType + xlength << lengthShift + xoffset);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns the type of a token
|
// Returns the type of a token
|
||||||
|
|
@ -84,16 +84,16 @@ func (t token) typ() uint32 {
|
||||||
|
|
||||||
// Returns the literal of a literal token
|
// Returns the literal of a literal token
|
||||||
func (t token) literal() uint32 {
|
func (t token) literal() uint32 {
|
||||||
return uint32(t-literalType);
|
return uint32(t - literalType);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns the extra offset of a match token
|
// Returns the extra offset of a match token
|
||||||
func (t token) offset() uint32 {
|
func (t token) offset() uint32 {
|
||||||
return uint32(t)&offsetMask;
|
return uint32(t) & offsetMask;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t token) length() uint32 {
|
func (t token) length() uint32 {
|
||||||
return uint32((t-matchType)>>lengthShift);
|
return uint32((t - matchType) >> lengthShift);
|
||||||
}
|
}
|
||||||
|
|
||||||
func lengthCode(len uint32) uint32 {
|
func lengthCode(len uint32) uint32 {
|
||||||
|
|
@ -107,9 +107,9 @@ func offsetCode(off uint32) uint32 {
|
||||||
case off < n:
|
case off < n:
|
||||||
return offsetCodes[off];
|
return offsetCodes[off];
|
||||||
case off>>7 < n:
|
case off>>7 < n:
|
||||||
return offsetCodes[off>>7]+14;
|
return offsetCodes[off>>7] + 14;
|
||||||
default:
|
default:
|
||||||
return offsetCodes[off>>14]+28;
|
return offsetCodes[off>>14] + 28;
|
||||||
}
|
}
|
||||||
panic("unreachable");
|
panic("unreachable");
|
||||||
}
|
}
|
||||||
@ -110,7 +110,7 @@ func (z *Inflater) read2() (uint32, os.Error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err;
|
return 0, err;
|
||||||
}
|
}
|
||||||
return uint32(z.buf[0]) | uint32(z.buf[1]) << 8, nil;
|
return uint32(z.buf[0]) | uint32(z.buf[1])<<8, nil;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (z *Inflater) readHeader(save bool) os.Error {
|
func (z *Inflater) readHeader(save bool) os.Error {
|
||||||
@ -12,31 +12,31 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
type gzipTest struct {
|
type gzipTest struct {
|
||||||
name string;
|
name string;
|
||||||
desc string;
|
desc string;
|
||||||
raw string;
|
raw string;
|
||||||
gzip []byte;
|
gzip []byte;
|
||||||
err os.Error;
|
err os.Error;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gzipTests = []gzipTest {
|
var gzipTests = []gzipTest{
|
||||||
gzipTest { // has 1 empty fixed-huffman block
|
gzipTest{ // has 1 empty fixed-huffman block
|
||||||
"empty.txt",
|
"empty.txt",
|
||||||
"empty.txt",
|
"empty.txt",
|
||||||
"",
|
"",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xf7, 0x5e, 0x14, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xf7, 0x5e, 0x14, 0x4a,
|
||||||
0x00, 0x03, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e,
|
0x00, 0x03, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0x03, 0x00, 0x00, 0x00,
|
0x74, 0x78, 0x74, 0x00, 0x03, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
},
|
},
|
||||||
nil
|
nil,
|
||||||
},
|
},
|
||||||
gzipTest { // has 1 non-empty fixed huffman block
|
gzipTest{ // has 1 non-empty fixed huffman block
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -44,14 +44,14 @@ var gzipTests = []gzipTest {
|
||||||
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
|
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
|
||||||
0x00, 0x00,
|
0x00, 0x00,
|
||||||
},
|
},
|
||||||
nil
|
nil,
|
||||||
},
|
},
|
||||||
gzipTest { // concatenation
|
gzipTest{ // concatenation
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt x2",
|
"hello.txt x2",
|
||||||
"hello world\n"
|
"hello world\n"
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -65,13 +65,13 @@ var gzipTests = []gzipTest {
|
||||||
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
|
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
|
||||||
0x00, 0x00,
|
0x00, 0x00,
|
||||||
},
|
},
|
||||||
nil
|
nil,
|
||||||
},
|
},
|
||||||
gzipTest { // has a fixed huffman block with some length-distance pairs
|
gzipTest{ // has a fixed huffman block with some length-distance pairs
|
||||||
"shesells.txt",
|
"shesells.txt",
|
||||||
"shesells.txt",
|
"shesells.txt",
|
||||||
"she sells seashells by the seashore\n",
|
"she sells seashells by the seashore\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0x72, 0x66, 0x8b, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0x72, 0x66, 0x8b, 0x4a,
|
||||||
0x00, 0x03, 0x73, 0x68, 0x65, 0x73, 0x65, 0x6c,
|
0x00, 0x03, 0x73, 0x68, 0x65, 0x73, 0x65, 0x6c,
|
||||||
0x6c, 0x73, 0x2e, 0x74, 0x78, 0x74, 0x00, 0x2b,
|
0x6c, 0x73, 0x2e, 0x74, 0x78, 0x74, 0x00, 0x2b,
|
||||||
|
|
@ -81,9 +81,9 @@ var gzipTests = []gzipTest {
|
||||||
0x94, 0xca, 0x05, 0x00, 0x76, 0xb0, 0x3b, 0xeb,
|
0x94, 0xca, 0x05, 0x00, 0x76, 0xb0, 0x3b, 0xeb,
|
||||||
0x24, 0x00, 0x00, 0x00,
|
0x24, 0x00, 0x00, 0x00,
|
||||||
},
|
},
|
||||||
nil
|
nil,
|
||||||
},
|
},
|
||||||
gzipTest { // has dynamic huffman blocks
|
gzipTest{ // has dynamic huffman blocks
|
||||||
"gettysburg",
|
"gettysburg",
|
||||||
"gettysburg",
|
"gettysburg",
|
||||||
" Four score and seven years ago our fathers brought forth on\n"
|
" Four score and seven years ago our fathers brought forth on\n"
|
||||||
|
|
@ -115,7 +115,7 @@ var gzipTests = []gzipTest {
|
||||||
"people, for the people, shall not perish from this earth.\n"
|
"people, for the people, shall not perish from this earth.\n"
|
||||||
"\n"
|
"\n"
|
||||||
"Abraham Lincoln, November 19, 1863, Gettysburg, Pennsylvania\n",
|
"Abraham Lincoln, November 19, 1863, Gettysburg, Pennsylvania\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xd1, 0x12, 0x2b, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xd1, 0x12, 0x2b, 0x4a,
|
||||||
0x00, 0x03, 0x67, 0x65, 0x74, 0x74, 0x79, 0x73,
|
0x00, 0x03, 0x67, 0x65, 0x74, 0x74, 0x79, 0x73,
|
||||||
0x62, 0x75, 0x72, 0x67, 0x00, 0x65, 0x54, 0xcd,
|
0x62, 0x75, 0x72, 0x67, 0x00, 0x65, 0x54, 0xcd,
|
||||||
|
|
@ -219,13 +219,13 @@ var gzipTests = []gzipTest {
|
||||||
0x4a, 0x65, 0x8f, 0x08, 0x42, 0x60, 0xf7, 0x0f,
|
0x4a, 0x65, 0x8f, 0x08, 0x42, 0x60, 0xf7, 0x0f,
|
||||||
0xb9, 0x16, 0x0b, 0x0c, 0x1a, 0x06, 0x00, 0x00,
|
0xb9, 0x16, 0x0b, 0x0c, 0x1a, 0x06, 0x00, 0x00,
|
||||||
},
|
},
|
||||||
nil
|
nil,
|
||||||
},
|
},
|
||||||
gzipTest { // has 1 non-empty fixed huffman block then garbage
|
gzipTest{ // has 1 non-empty fixed huffman block then garbage
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt + garbage",
|
"hello.txt + garbage",
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -235,11 +235,11 @@ var gzipTests = []gzipTest {
|
||||||
},
|
},
|
||||||
HeaderError,
|
HeaderError,
|
||||||
},
|
},
|
||||||
gzipTest { // has 1 non-empty fixed huffman block not enough header
|
gzipTest{ // has 1 non-empty fixed huffman block not enough header
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt + garbage",
|
"hello.txt + garbage",
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -249,11 +249,11 @@ var gzipTests = []gzipTest {
|
||||||
},
|
},
|
||||||
io.ErrUnexpectedEOF,
|
io.ErrUnexpectedEOF,
|
||||||
},
|
},
|
||||||
gzipTest { // has 1 non-empty fixed huffman block but corrupt checksum
|
gzipTest{ // has 1 non-empty fixed huffman block but corrupt checksum
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt + corrupt checksum",
|
"hello.txt + corrupt checksum",
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -263,11 +263,11 @@ var gzipTests = []gzipTest {
|
||||||
},
|
},
|
||||||
ChecksumError,
|
ChecksumError,
|
||||||
},
|
},
|
||||||
gzipTest { // has 1 non-empty fixed huffman block but corrupt size
|
gzipTest{ // has 1 non-empty fixed huffman block but corrupt size
|
||||||
"hello.txt",
|
"hello.txt",
|
||||||
"hello.txt + corrupt size",
|
"hello.txt + corrupt size",
|
||||||
"hello world\n",
|
"hello world\n",
|
||||||
[]byte {
|
[]byte{
|
||||||
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
|
||||||
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
|
||||||
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
|
||||||
|
|
@ -303,4 +303,3 @@ func TestInflater(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -43,7 +43,7 @@ func NewInflater(r io.Reader) (io.ReadCloser, os.Error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err;
|
return nil, err;
|
||||||
}
|
}
|
||||||
h := uint(z.scratch[0]) << 8 | uint(z.scratch[1]);
|
h := uint(z.scratch[0])<<8 | uint(z.scratch[1]);
|
||||||
if (z.scratch[0] & 0x0f != zlibDeflate) || (h%31 != 0) {
|
if (z.scratch[0] & 0x0f != zlibDeflate) || (h%31 != 0) {
|
||||||
return nil, HeaderError;
|
return nil, HeaderError;
|
||||||
}
|
}
|
||||||
|
|
@ -77,7 +77,7 @@ func (z *reader) Read(p []byte) (n int, err os.Error) {
|
||||||
return 0, err;
|
return 0, err;
|
||||||
}
|
}
|
||||||
// ZLIB (RFC 1950) is big-endian, unlike GZIP (RFC 1952).
|
// ZLIB (RFC 1950) is big-endian, unlike GZIP (RFC 1952).
|
||||||
checksum := uint32(z.scratch[0]) << 24 | uint32(z.scratch[1]) << 16 | uint32(z.scratch[2]) << 8 | uint32(z.scratch[3]);
|
checksum := uint32(z.scratch[0])<<24 | uint32(z.scratch[1])<<16 | uint32(z.scratch[2])<<8 | uint32(z.scratch[3]);
|
||||||
if checksum != z.digest.Sum32() {
|
if checksum != z.digest.Sum32() {
|
||||||
z.err = ChecksumError;
|
z.err = ChecksumError;
|
||||||
return 0, z.err;
|
return 0, z.err;
|
||||||
@ -73,7 +73,7 @@ func (p *Vector) Init(initial_len int) *Vector {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
p.a = a[0:initial_len];
|
p.a = a[0 : initial_len];
|
||||||
return p;
|
return p;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -108,7 +108,7 @@ func (p *Vector) Set(i int, x Element) {
|
||||||
|
|
||||||
// Last returns the element in the vector of highest index.
|
// Last returns the element in the vector of highest index.
|
||||||
func (p *Vector) Last() Element {
|
func (p *Vector) Last() Element {
|
||||||
return p.a[len(p.a) - 1];
|
return p.a[len(p.a)-1];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -146,7 +146,7 @@ func (p *Vector) Delete(i int) {
|
||||||
// x such that the 0th element of x appears at index i after insertion.
|
// x such that the 0th element of x appears at index i after insertion.
|
||||||
func (p *Vector) InsertVector(i int, x *Vector) {
|
func (p *Vector) InsertVector(i int, x *Vector) {
|
||||||
p.a = expand(p.a, i, len(x.a));
|
p.a = expand(p.a, i, len(x.a));
|
||||||
copy(p.a[i : i + len(x.a)], x.a);
|
copy(p.a[i : i+len(x.a)], x.a);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -193,7 +193,7 @@ func (p *Vector) Push(x Element) {
|
||||||
|
|
||||||
// Pop deletes the last element of the vector.
|
// Pop deletes the last element of the vector.
|
||||||
func (p *Vector) Pop() Element {
|
func (p *Vector) Pop() Element {
|
||||||
i := len(p.a) - 1;
|
i := len(p.a)-1;
|
||||||
x := p.a[i];
|
x := p.a[i];
|
||||||
p.a[i] = nil; // support GC, nil out entry
|
p.a[i] = nil; // support GC, nil out entry
|
||||||
p.a = p.a[0:i];
|
p.a = p.a[0:i];
|
||||||
@ -278,7 +278,7 @@ var encryptTests = []CryptTest{
|
||||||
// Test encryptBlock against FIPS 197 examples.
|
// Test encryptBlock against FIPS 197 examples.
|
||||||
func TestEncryptBlock(t *testing.T) {
|
func TestEncryptBlock(t *testing.T) {
|
||||||
for i, tt := range encryptTests {
|
for i, tt := range encryptTests {
|
||||||
n := len(tt.key) + 28;
|
n := len(tt.key)+28;
|
||||||
enc := make([]uint32, n);
|
enc := make([]uint32, n);
|
||||||
dec := make([]uint32, n);
|
dec := make([]uint32, n);
|
||||||
expandKey(tt.key, enc, dec);
|
expandKey(tt.key, enc, dec);
|
||||||
|
|
@ -296,7 +296,7 @@ func TestEncryptBlock(t *testing.T) {
|
||||||
// Test decryptBlock against FIPS 197 examples.
|
// Test decryptBlock against FIPS 197 examples.
|
||||||
func TestDecryptBlock(t *testing.T) {
|
func TestDecryptBlock(t *testing.T) {
|
||||||
for i, tt := range encryptTests {
|
for i, tt := range encryptTests {
|
||||||
n := len(tt.key) + 28;
|
n := len(tt.key)+28;
|
||||||
enc := make([]uint32, n);
|
enc := make([]uint32, n);
|
||||||
dec := make([]uint32, n);
|
dec := make([]uint32, n);
|
||||||
expandKey(tt.key, enc, dec);
|
expandKey(tt.key, enc, dec);
|
||||||
@ -56,19 +56,19 @@ func encryptBlock(xk []uint32, src, dst []byte) {
|
||||||
nr := len(xk)/4 - 2; // - 2: one above, one more below
|
nr := len(xk)/4 - 2; // - 2: one above, one more below
|
||||||
k := 4;
|
k := 4;
|
||||||
for r := 0; r < nr; r++ {
|
for r := 0; r < nr; r++ {
|
||||||
t0 = xk[k+0] ^ te[0][s0>>24] ^ te[1][s1>>16 & 0xff] ^ te[2][s2>>8 & 0xff] ^ te[3][s3 & 0xff];
|
t0 = xk[k+0]^te[0][s0>>24]^te[1][s1>>16&0xff]^te[2][s2>>8&0xff]^te[3][s3&0xff];
|
||||||
t1 = xk[k+1] ^ te[0][s1>>24] ^ te[1][s2>>16 & 0xff] ^ te[2][s3>>8 & 0xff] ^ te[3][s0 & 0xff];
|
t1 = xk[k+1]^te[0][s1>>24]^te[1][s2>>16&0xff]^te[2][s3>>8&0xff]^te[3][s0&0xff];
|
||||||
t2 = xk[k+2] ^ te[0][s2>>24] ^ te[1][s3>>16 & 0xff] ^ te[2][s0>>8 & 0xff] ^ te[3][s1 & 0xff];
|
t2 = xk[k+2]^te[0][s2>>24]^te[1][s3>>16&0xff]^te[2][s0>>8&0xff]^te[3][s1&0xff];
|
||||||
t3 = xk[k+3] ^ te[0][s3>>24] ^ te[1][s0>>16 & 0xff] ^ te[2][s1>>8 & 0xff] ^ te[3][s2 & 0xff];
|
t3 = xk[k+3]^te[0][s3>>24]^te[1][s0>>16&0xff]^te[2][s1>>8&0xff]^te[3][s2&0xff];
|
||||||
k += 4;
|
k += 4;
|
||||||
s0, s1, s2, s3 = t0, t1, t2, t3;
|
s0, s1, s2, s3 = t0, t1, t2, t3;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Last round uses s-box directly and XORs to produce output.
|
// Last round uses s-box directly and XORs to produce output.
|
||||||
s0 = uint32(sbox0[t0>>24])<<24 | uint32(sbox0[t1>>16 & 0xff])<<16 | uint32(sbox0[t2>>8 & 0xff])<<8 | uint32(sbox0[t3 & 0xff]);
|
s0 = uint32(sbox0[t0>>24])<<24 | uint32(sbox0[t1>>16&0xff])<<16 | uint32(sbox0[t2>>8&0xff])<<8 | uint32(sbox0[t3&0xff]);
|
||||||
s1 = uint32(sbox0[t1>>24])<<24 | uint32(sbox0[t2>>16 & 0xff])<<16 | uint32(sbox0[t3>>8 & 0xff])<<8 | uint32(sbox0[t0 & 0xff]);
|
s1 = uint32(sbox0[t1>>24])<<24 | uint32(sbox0[t2>>16&0xff])<<16 | uint32(sbox0[t3>>8&0xff])<<8 | uint32(sbox0[t0&0xff]);
|
||||||
s2 = uint32(sbox0[t2>>24])<<24 | uint32(sbox0[t3>>16 & 0xff])<<16 | uint32(sbox0[t0>>8 & 0xff])<<8 | uint32(sbox0[t1 & 0xff]);
|
s2 = uint32(sbox0[t2>>24])<<24 | uint32(sbox0[t3>>16&0xff])<<16 | uint32(sbox0[t0>>8&0xff])<<8 | uint32(sbox0[t1&0xff]);
|
||||||
s3 = uint32(sbox0[t3>>24])<<24 | uint32(sbox0[t0>>16 & 0xff])<<16 | uint32(sbox0[t1>>8 & 0xff])<<8 | uint32(sbox0[t2 & 0xff]);
|
s3 = uint32(sbox0[t3>>24])<<24 | uint32(sbox0[t0>>16&0xff])<<16 | uint32(sbox0[t1>>8&0xff])<<8 | uint32(sbox0[t2&0xff]);
|
||||||
|
|
||||||
s0 ^= xk[k+0];
|
s0 ^= xk[k+0];
|
||||||
s1 ^= xk[k+1];
|
s1 ^= xk[k+1];
|
||||||
|
|
@ -101,19 +101,19 @@ func decryptBlock(xk []uint32, src, dst []byte) {
|
||||||
nr := len(xk)/4 - 2; // - 2: one above, one more below
|
nr := len(xk)/4 - 2; // - 2: one above, one more below
|
||||||
k := 4;
|
k := 4;
|
||||||
for r := 0; r < nr; r++ {
|
for r := 0; r < nr; r++ {
|
||||||
t0 = xk[k+0] ^ td[0][s0>>24] ^ td[1][s3>>16 & 0xff] ^ td[2][s2>>8 & 0xff] ^ td[3][s1 & 0xff];
|
t0 = xk[k+0]^td[0][s0>>24]^td[1][s3>>16&0xff]^td[2][s2>>8&0xff]^td[3][s1&0xff];
|
||||||
t1 = xk[k+1] ^ td[0][s1>>24] ^ td[1][s0>>16 & 0xff] ^ td[2][s3>>8 & 0xff] ^ td[3][s2 & 0xff];
|
t1 = xk[k+1]^td[0][s1>>24]^td[1][s0>>16&0xff]^td[2][s3>>8&0xff]^td[3][s2&0xff];
|
||||||
t2 = xk[k+2] ^ td[0][s2>>24] ^ td[1][s1>>16 & 0xff] ^ td[2][s0>>8 & 0xff] ^ td[3][s3 & 0xff];
|
t2 = xk[k+2]^td[0][s2>>24]^td[1][s1>>16&0xff]^td[2][s0>>8&0xff]^td[3][s3&0xff];
|
||||||
t3 = xk[k+3] ^ td[0][s3>>24] ^ td[1][s2>>16 & 0xff] ^ td[2][s1>>8 & 0xff] ^ td[3][s0 & 0xff];
|
t3 = xk[k+3]^td[0][s3>>24]^td[1][s2>>16&0xff]^td[2][s1>>8&0xff]^td[3][s0&0xff];
|
||||||
k += 4;
|
k += 4;
|
||||||
s0, s1, s2, s3 = t0, t1, t2, t3;
|
s0, s1, s2, s3 = t0, t1, t2, t3;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Last round uses s-box directly and XORs to produce output.
|
// Last round uses s-box directly and XORs to produce output.
|
||||||
s0 = uint32(sbox1[t0>>24])<<24 | uint32(sbox1[t3>>16 & 0xff])<<16 | uint32(sbox1[t2>>8 & 0xff])<<8 | uint32(sbox1[t1 & 0xff]);
|
s0 = uint32(sbox1[t0>>24])<<24 | uint32(sbox1[t3>>16&0xff])<<16 | uint32(sbox1[t2>>8&0xff])<<8 | uint32(sbox1[t1&0xff]);
|
||||||
s1 = uint32(sbox1[t1>>24])<<24 | uint32(sbox1[t0>>16 & 0xff])<<16 | uint32(sbox1[t3>>8 & 0xff])<<8 | uint32(sbox1[t2 & 0xff]);
|
s1 = uint32(sbox1[t1>>24])<<24 | uint32(sbox1[t0>>16&0xff])<<16 | uint32(sbox1[t3>>8&0xff])<<8 | uint32(sbox1[t2&0xff]);
|
||||||
s2 = uint32(sbox1[t2>>24])<<24 | uint32(sbox1[t1>>16 & 0xff])<<16 | uint32(sbox1[t0>>8 & 0xff])<<8 | uint32(sbox1[t3 & 0xff]);
|
s2 = uint32(sbox1[t2>>24])<<24 | uint32(sbox1[t1>>16&0xff])<<16 | uint32(sbox1[t0>>8&0xff])<<8 | uint32(sbox1[t3&0xff]);
|
||||||
s3 = uint32(sbox1[t3>>24])<<24 | uint32(sbox1[t2>>16 & 0xff])<<16 | uint32(sbox1[t1>>8 & 0xff])<<8 | uint32(sbox1[t0 & 0xff]);
|
s3 = uint32(sbox1[t3>>24])<<24 | uint32(sbox1[t2>>16&0xff])<<16 | uint32(sbox1[t1>>8&0xff])<<8 | uint32(sbox1[t0&0xff]);
|
||||||
|
|
||||||
s0 ^= xk[k+0];
|
s0 ^= xk[k+0];
|
||||||
s1 ^= xk[k+1];
|
s1 ^= xk[k+1];
|
||||||
|
|
@ -128,11 +128,10 @@ func decryptBlock(xk []uint32, src, dst []byte) {
|
||||||
|
|
||||||
// Apply sbox0 to each byte in w.
|
// Apply sbox0 to each byte in w.
|
||||||
func subw(w uint32) uint32 {
|
func subw(w uint32) uint32 {
|
||||||
return
|
return uint32(sbox0[w>>24])<<24 |
|
||||||
uint32(sbox0[w>>24])<<24 |
|
uint32(sbox0[w>>16&0xff])<<16 |
|
||||||
uint32(sbox0[w>>16 & 0xff])<<16 |
|
uint32(sbox0[w>>8&0xff])<<8 |
|
||||||
uint32(sbox0[w>>8 & 0xff])<<8 |
|
uint32(sbox0[w&0xff]);
|
||||||
uint32(sbox0[w & 0xff]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rotate
|
// Rotate
|
||||||
|
|
@ -145,18 +144,18 @@ func rotw(w uint32) uint32 {
|
||||||
func expandKey(key []byte, enc, dec []uint32) {
|
func expandKey(key []byte, enc, dec []uint32) {
|
||||||
// Encryption key setup.
|
// Encryption key setup.
|
||||||
var i int;
|
var i int;
|
||||||
nk := len(key) / 4;
|
nk := len(key)/4;
|
||||||
for i = 0; i < nk; i++ {
|
for i = 0; i < nk; i++ {
|
||||||
enc[i] = uint32(key[4*i])<<24 | uint32(key[4*i+1])<<16 | uint32(key[4*i+2])<<8 | uint32(key[4*i+3]);
|
enc[i] = uint32(key[4*i])<<24 | uint32(key[4*i + 1])<<16 | uint32(key[4*i + 2])<<8 | uint32(key[4*i + 3]);
|
||||||
}
|
}
|
||||||
for ; i < len(enc); i++ {
|
for ; i < len(enc); i++ {
|
||||||
t := enc[i-1];
|
t := enc[i-1];
|
||||||
if i % nk == 0 {
|
if i%nk == 0 {
|
||||||
t = subw(rotw(t)) ^ (uint32(powx[i/nk - 1]) << 24);
|
t = subw(rotw(t))^(uint32(powx[i/nk - 1])<<24);
|
||||||
} else if nk > 6 && i % nk == 4 {
|
} else if nk > 6 && i%nk == 4 {
|
||||||
t = subw(t);
|
t = subw(t);
|
||||||
}
|
}
|
||||||
enc[i] = enc[i-nk] ^ t;
|
enc[i] = enc[i-nk]^t;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Derive decryption key from encryption key.
|
// Derive decryption key from encryption key.
|
||||||
|
|
@ -167,14 +166,13 @@ func expandKey(key []byte, enc, dec []uint32) {
|
||||||
}
|
}
|
||||||
n := len(enc);
|
n := len(enc);
|
||||||
for i := 0; i < n; i += 4 {
|
for i := 0; i < n; i += 4 {
|
||||||
ei := n - i - 4;
|
ei := n-i-4;
|
||||||
for j := 0; j < 4; j++ {
|
for j := 0; j < 4; j++ {
|
||||||
x := enc[ei+j];
|
x := enc[ei+j];
|
||||||
if i > 0 && i+4 < n {
|
if i > 0 && i+4 < n {
|
||||||
x = td[0][sbox0[x>>24]] ^ td[1][sbox0[x>>16 & 0xff]] ^ td[2][sbox0[x>>8 & 0xff]] ^ td[3][sbox0[x & 0xff]];
|
x = td[0][sbox0[x>>24]]^td[1][sbox0[x>>16&0xff]]^td[2][sbox0[x>>8&0xff]]^td[3][sbox0[x&0xff]];
|
||||||
}
|
}
|
||||||
dec[i+j] = x;
|
dec[i+j] = x;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -35,7 +35,7 @@ func (x *ctrStream) Next() []byte {
|
||||||
x.c.Encrypt(x.ctr, x.out);
|
x.c.Encrypt(x.ctr, x.out);
|
||||||
|
|
||||||
// Increment counter
|
// Increment counter
|
||||||
for i := len(x.ctr) - 1; i >= 0; i-- {
|
for i := len(x.ctr)-1; i >= 0; i-- {
|
||||||
x.ctr[i]++;
|
x.ctr[i]++;
|
||||||
if x.ctr[i] != 0 {
|
if x.ctr[i] != 0 {
|
||||||
break;
|
break;
|
||||||
@ -79,7 +79,7 @@ func TestCTR_AES(t *testing.T) {
|
||||||
|
|
||||||
for j := 0; j <= 5; j += 5 {
|
for j := 0; j <= 5; j += 5 {
|
||||||
var crypt bytes.Buffer;
|
var crypt bytes.Buffer;
|
||||||
in := tt.in[0 : len(tt.in) - j];
|
in := tt.in[0 : len(tt.in)-j];
|
||||||
w := NewCTRWriter(c, tt.iv, &crypt);
|
w := NewCTRWriter(c, tt.iv, &crypt);
|
||||||
var r io.Reader = bytes.NewBuffer(in);
|
var r io.Reader = bytes.NewBuffer(in);
|
||||||
n, err := io.Copy(r, w);
|
n, err := io.Copy(r, w);
|
||||||
|
|
@ -92,7 +92,7 @@ func TestCTR_AES(t *testing.T) {
|
||||||
|
|
||||||
for j := 0; j <= 7; j += 7 {
|
for j := 0; j <= 7; j += 7 {
|
||||||
var plain bytes.Buffer;
|
var plain bytes.Buffer;
|
||||||
out := tt.out[0 : len(tt.out) - j];
|
out := tt.out[0 : len(tt.out)-j];
|
||||||
r := NewCTRReader(c, tt.iv, bytes.NewBuffer(out));
|
r := NewCTRReader(c, tt.iv, bytes.NewBuffer(out));
|
||||||
w := &plain;
|
w := &plain;
|
||||||
n, err := io.Copy(r, w);
|
n, err := io.Copy(r, w);
|
||||||
@ -66,7 +66,7 @@ func (x *ecbDecrypter) readPlain(p []byte) int {
|
||||||
p[i] = x.plain[i];
|
p[i] = x.plain[i];
|
||||||
}
|
}
|
||||||
if n < len(x.plain) {
|
if n < len(x.plain) {
|
||||||
x.plain = x.plain[n : len(x.plain)];
|
x.plain = x.plain[n:len(x.plain)];
|
||||||
} else {
|
} else {
|
||||||
x.plain = nil;
|
x.plain = nil;
|
||||||
}
|
}
|
||||||
|
|
@ -172,7 +172,7 @@ func (x *ecbEncrypter) flushCrypt() os.Error {
|
||||||
}
|
}
|
||||||
n, err := x.w.Write(x.crypt);
|
n, err := x.w.Write(x.crypt);
|
||||||
if n < len(x.crypt) {
|
if n < len(x.crypt) {
|
||||||
x.crypt = x.crypt[n : len(x.crypt)];
|
x.crypt = x.crypt[n:len(x.crypt)];
|
||||||
if err == nil {
|
if err == nil {
|
||||||
err = io.ErrShortWrite;
|
err = io.ErrShortWrite;
|
||||||
}
|
}
|
||||||
|
|
@ -196,7 +196,7 @@ func (x *ecbEncrypter) slidePlain() {
|
||||||
for i := 0; i < len(x.plain); i++ {
|
for i := 0; i < len(x.plain); i++ {
|
||||||
x.buf[i] = x.plain[i];
|
x.buf[i] = x.plain[i];
|
||||||
}
|
}
|
||||||
x.plain = x.buf[0 : len(x.plain)];
|
x.plain = x.buf[0:len(x.plain)];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -205,7 +205,7 @@ func (x *ecbEncrypter) slidePlain() {
|
||||||
func (x *ecbEncrypter) fillPlain(p []byte) int {
|
func (x *ecbEncrypter) fillPlain(p []byte) int {
|
||||||
off := len(x.plain);
|
off := len(x.plain);
|
||||||
n := len(p);
|
n := len(p);
|
||||||
if max := cap(x.plain) - off; n > max {
|
if max := cap(x.plain)-off; n > max {
|
||||||
n = max;
|
n = max;
|
||||||
}
|
}
|
||||||
x.plain = x.plain[0 : off+n];
|
x.plain = x.plain[0 : off+n];
|
||||||
@ -77,7 +77,7 @@ func TestOFB_AES(t *testing.T) {
|
||||||
|
|
||||||
for j := 0; j <= 5; j += 5 {
|
for j := 0; j <= 5; j += 5 {
|
||||||
var crypt bytes.Buffer;
|
var crypt bytes.Buffer;
|
||||||
in := tt.in[0 : len(tt.in) - j];
|
in := tt.in[0 : len(tt.in)-j];
|
||||||
w := NewOFBWriter(c, tt.iv, &crypt);
|
w := NewOFBWriter(c, tt.iv, &crypt);
|
||||||
var r io.Reader = bytes.NewBuffer(in);
|
var r io.Reader = bytes.NewBuffer(in);
|
||||||
n, err := io.Copy(r, w);
|
n, err := io.Copy(r, w);
|
||||||
|
|
@ -90,7 +90,7 @@ func TestOFB_AES(t *testing.T) {
|
||||||
|
|
||||||
for j := 0; j <= 7; j += 7 {
|
for j := 0; j <= 7; j += 7 {
|
||||||
var plain bytes.Buffer;
|
var plain bytes.Buffer;
|
||||||
out := tt.out[0 : len(tt.out) - j];
|
out := tt.out[0 : len(tt.out)-j];
|
||||||
r := NewOFBReader(c, tt.iv, bytes.NewBuffer(out));
|
r := NewOFBReader(c, tt.iv, bytes.NewBuffer(out));
|
||||||
w := &plain;
|
w := &plain;
|
||||||
n, err := io.Copy(r, w);
|
n, err := io.Copy(r, w);
|
||||||
@ -55,7 +55,7 @@ func (p *parser) errorExpected(pos token.Position, msg string) {
|
||||||
// make the error message more specific
|
// make the error message more specific
|
||||||
msg += ", found '" + p.tok.String() + "'";
|
msg += ", found '" + p.tok.String() + "'";
|
||||||
if p.tok.IsLiteral() {
|
if p.tok.IsLiteral() {
|
||||||
msg += " " + string(p.lit);
|
msg += " "+string(p.lit);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
p.Error(pos, msg);
|
p.Error(pos, msg);
|
||||||
|
|
@ -347,7 +347,7 @@ func remap(p *parser, name string) string {
|
||||||
packageName, suffix := name[0:i], name[i:len(name)];
|
packageName, suffix := name[0:i], name[i:len(name)];
|
||||||
// lookup package
|
// lookup package
|
||||||
if importPath, found := p.packs[packageName]; found {
|
if importPath, found := p.packs[packageName]; found {
|
||||||
name = importPath+suffix;
|
name = importPath + suffix;
|
||||||
} else {
|
} else {
|
||||||
var invalidPos token.Position;
|
var invalidPos token.Position;
|
||||||
p.Error(invalidPos, "package not declared: " + packageName);
|
p.Error(invalidPos, "package not declared: " + packageName);
|
||||||
@ -141,8 +141,8 @@ func sizeof(t reflect.Type) int {
|
||||||
}
|
}
|
||||||
|
|
||||||
type decoder struct {
|
type decoder struct {
|
||||||
order ByteOrder;
|
order ByteOrder;
|
||||||
buf []byte;
|
buf []byte;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) uint8() uint8 {
|
func (d *decoder) uint8() uint8 {
|
||||||
@ -33,7 +33,7 @@ func (b *buf) uint8() uint8 {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
val := b.data[0];
|
val := b.data[0];
|
||||||
b.data = b.data[1 : len(b.data)];
|
b.data = b.data[1:len(b.data)];
|
||||||
b.off++;
|
b.off++;
|
||||||
return val;
|
return val;
|
||||||
}
|
}
|
||||||
|
|
@ -44,7 +44,7 @@ func (b *buf) bytes(n int) []byte {
|
||||||
return nil;
|
return nil;
|
||||||
}
|
}
|
||||||
data := b.data[0:n];
|
data := b.data[0:n];
|
||||||
b.data = b.data[n : len(b.data)];
|
b.data = b.data[n:len(b.data)];
|
||||||
b.off += Offset(n);
|
b.off += Offset(n);
|
||||||
return data;
|
return data;
|
||||||
}
|
}
|
||||||
|
|
@ -95,7 +95,7 @@ func (b *buf) uint64() uint64 {
|
||||||
func (b *buf) varint() (c uint64, bits uint) {
|
func (b *buf) varint() (c uint64, bits uint) {
|
||||||
for i := 0; i < len(b.data); i++ {
|
for i := 0; i < len(b.data); i++ {
|
||||||
byte := b.data[i];
|
byte := b.data[i];
|
||||||
c |= uint64(byte&0x7F) << bits;
|
c |= uint64(byte&0x7F)<<bits;
|
||||||
bits += 7;
|
bits += 7;
|
||||||
if byte&0x80 == 0 {
|
if byte&0x80 == 0 {
|
||||||
b.off += Offset(i+1);
|
b.off += Offset(i+1);
|
||||||
|
|
|
||||||
|
|
@ -190,15 +190,15 @@ func (b *buf) entry(atab abbrevTable, ubase Offset) *Entry {
|
||||||
case formRefAddr:
|
case formRefAddr:
|
||||||
val = Offset(b.addr());
|
val = Offset(b.addr());
|
||||||
case formRef1:
|
case formRef1:
|
||||||
val = Offset(b.uint8()) + ubase;
|
val = Offset(b.uint8())+ubase;
|
||||||
case formRef2:
|
case formRef2:
|
||||||
val = Offset(b.uint16()) + ubase;
|
val = Offset(b.uint16())+ubase;
|
||||||
case formRef4:
|
case formRef4:
|
||||||
val = Offset(b.uint32()) + ubase;
|
val = Offset(b.uint32())+ubase;
|
||||||
case formRef8:
|
case formRef8:
|
||||||
val = Offset(b.uint64()) + ubase;
|
val = Offset(b.uint64())+ubase;
|
||||||
case formRefUdata:
|
case formRefUdata:
|
||||||
val = Offset(b.uint()) + ubase;
|
val = Offset(b.uint())+ubase;
|
||||||
|
|
||||||
// string
|
// string
|
||||||
case formString:
|
case formString:
|
||||||
@ -20,7 +20,7 @@ var nameTests = []nameTest{
|
||||||
nameTest{EM_860, "EM_860"},
|
nameTest{EM_860, "EM_860"},
|
||||||
nameTest{SHN_LOPROC, "SHN_LOPROC"},
|
nameTest{SHN_LOPROC, "SHN_LOPROC"},
|
||||||
nameTest{SHT_PROGBITS, "SHT_PROGBITS"},
|
nameTest{SHT_PROGBITS, "SHT_PROGBITS"},
|
||||||
nameTest{SHF_MERGE+SHF_TLS, "SHF_MERGE+SHF_TLS"},
|
nameTest{SHF_MERGE + SHF_TLS, "SHF_MERGE+SHF_TLS"},
|
||||||
nameTest{PT_LOAD, "PT_LOAD"},
|
nameTest{PT_LOAD, "PT_LOAD"},
|
||||||
nameTest{PF_W+PF_R+0x50, "PF_W+PF_R+0x50"},
|
nameTest{PF_W+PF_R+0x50, "PF_W+PF_R+0x50"},
|
||||||
nameTest{DT_SYMBOLIC, "DT_SYMBOLIC"},
|
nameTest{DT_SYMBOLIC, "DT_SYMBOLIC"},
|
||||||
@ -298,7 +298,7 @@ func NewFile(r io.ReaderAt) (*File, os.Error) {
|
||||||
var ok bool;
|
var ok bool;
|
||||||
s.Name, ok = getString(shstrtab, int(names[i]));
|
s.Name, ok = getString(shstrtab, int(names[i]));
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, &FormatError{shoff + int64(i*shentsize), "bad section name index", names[i]};
|
return nil, &FormatError{shoff+int64(i * shentsize), "bad section name index", names[i]};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -32,14 +32,14 @@ var fileTests = []fileTest{
|
||||||
SectionHeader{".text", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x80483cc, 0x3cc, 0x180, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".text", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x80483cc, 0x3cc, 0x180, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".fini", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x804854c, 0x54c, 0xc, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".fini", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x804854c, 0x54c, 0xc, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x8048558, 0x558, 0xa3, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x8048558, 0x558, 0xa3, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".data", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80495fc, 0x5fc, 0xc, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".data", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80495fc, 0x5fc, 0xc, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x8049608, 0x608, 0x4, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x8049608, 0x608, 0x4, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE+SHF_ALLOC, 0x804960c, 0x60c, 0x98, 0x4, 0x0, 0x4, 0x8},
|
SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE + SHF_ALLOC, 0x804960c, 0x60c, 0x98, 0x4, 0x0, 0x4, 0x8},
|
||||||
SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496a4, 0x6a4, 0x8, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496a4, 0x6a4, 0x8, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496ac, 0x6ac, 0x8, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496ac, 0x6ac, 0x8, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496b4, 0x6b4, 0x4, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496b4, 0x6b4, 0x4, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".got", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496b8, 0x6b8, 0x1c, 0x0, 0x0, 0x4, 0x4},
|
SectionHeader{".got", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496b8, 0x6b8, 0x1c, 0x0, 0x0, 0x4, 0x4},
|
||||||
SectionHeader{".bss", SHT_NOBITS, SHF_WRITE+SHF_ALLOC, 0x80496d4, 0x6d4, 0x20, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".bss", SHT_NOBITS, SHF_WRITE + SHF_ALLOC, 0x80496d4, 0x6d4, 0x20, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x6d4, 0x12d, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x6d4, 0x12d, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x801, 0x20, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x801, 0x20, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0x821, 0x1b, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0x821, 0x1b, 0x0, 0x0, 0x1, 0x0},
|
||||||
|
|
@ -75,21 +75,21 @@ var fileTests = []fileTest{
|
||||||
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x4005a4, 0x5a4, 0x11, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x4005a4, 0x5a4, 0x11, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".eh_frame_hdr", SHT_PROGBITS, SHF_ALLOC, 0x4005b8, 0x5b8, 0x24, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".eh_frame_hdr", SHT_PROGBITS, SHF_ALLOC, 0x4005b8, 0x5b8, 0x24, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x4005e0, 0x5e0, 0xa4, 0x0, 0x0, 0x8, 0x0},
|
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x4005e0, 0x5e0, 0xa4, 0x0, 0x0, 0x8, 0x0},
|
||||||
SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600688, 0x688, 0x10, 0x0, 0x0, 0x8, 0x0},
|
SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600688, 0x688, 0x10, 0x0, 0x0, 0x8, 0x0},
|
||||||
SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600698, 0x698, 0x10, 0x0, 0x0, 0x8, 0x0},
|
SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600698, 0x698, 0x10, 0x0, 0x0, 0x8, 0x0},
|
||||||
SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x6006a8, 0x6a8, 0x8, 0x0, 0x0, 0x8, 0x0},
|
SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x6006a8, 0x6a8, 0x8, 0x0, 0x0, 0x8, 0x0},
|
||||||
SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE+SHF_ALLOC, 0x6006b0, 0x6b0, 0x1a0, 0x6, 0x0, 0x8, 0x10},
|
SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE + SHF_ALLOC, 0x6006b0, 0x6b0, 0x1a0, 0x6, 0x0, 0x8, 0x10},
|
||||||
SectionHeader{".got", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600850, 0x850, 0x8, 0x0, 0x0, 0x8, 0x8},
|
SectionHeader{".got", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600850, 0x850, 0x8, 0x0, 0x0, 0x8, 0x8},
|
||||||
SectionHeader{".got.plt", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600858, 0x858, 0x28, 0x0, 0x0, 0x8, 0x8},
|
SectionHeader{".got.plt", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600858, 0x858, 0x28, 0x0, 0x0, 0x8, 0x8},
|
||||||
SectionHeader{".data", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600880, 0x880, 0x18, 0x0, 0x0, 0x8, 0x0},
|
SectionHeader{".data", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600880, 0x880, 0x18, 0x0, 0x0, 0x8, 0x0},
|
||||||
SectionHeader{".bss", SHT_NOBITS, SHF_WRITE+SHF_ALLOC, 0x600898, 0x898, 0x8, 0x0, 0x0, 0x4, 0x0},
|
SectionHeader{".bss", SHT_NOBITS, SHF_WRITE + SHF_ALLOC, 0x600898, 0x898, 0x8, 0x0, 0x0, 0x4, 0x0},
|
||||||
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x898, 0x126, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x898, 0x126, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x9c0, 0x90, 0x0, 0x0, 0x10, 0x0},
|
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x9c0, 0x90, 0x0, 0x0, 0x10, 0x0},
|
||||||
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0xa50, 0x25, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0xa50, 0x25, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_info", SHT_PROGBITS, 0x0, 0x0, 0xa75, 0x1a7, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_info", SHT_PROGBITS, 0x0, 0x0, 0xa75, 0x1a7, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_abbrev", SHT_PROGBITS, 0x0, 0x0, 0xc1c, 0x6f, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_abbrev", SHT_PROGBITS, 0x0, 0x0, 0xc1c, 0x6f, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_line", SHT_PROGBITS, 0x0, 0x0, 0xc8b, 0x13f, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".debug_line", SHT_PROGBITS, 0x0, 0x0, 0xc8b, 0x13f, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".debug_str", SHT_PROGBITS, SHF_MERGE+SHF_STRINGS, 0x0, 0xdca, 0xb1, 0x0, 0x0, 0x1, 0x1},
|
SectionHeader{".debug_str", SHT_PROGBITS, SHF_MERGE + SHF_STRINGS, 0x0, 0xdca, 0xb1, 0x0, 0x0, 0x1, 0x1},
|
||||||
SectionHeader{".debug_ranges", SHT_PROGBITS, 0x0, 0x0, 0xe80, 0x90, 0x0, 0x0, 0x10, 0x0},
|
SectionHeader{".debug_ranges", SHT_PROGBITS, 0x0, 0x0, 0xe80, 0x90, 0x0, 0x0, 0x10, 0x0},
|
||||||
SectionHeader{".shstrtab", SHT_STRTAB, 0x0, 0x0, 0xf10, 0x149, 0x0, 0x0, 0x1, 0x0},
|
SectionHeader{".shstrtab", SHT_STRTAB, 0x0, 0x0, 0xf10, 0x149, 0x0, 0x0, 0x1, 0x0},
|
||||||
SectionHeader{".symtab", SHT_SYMTAB, 0x0, 0x0, 0x19a0, 0x6f0, 0x24, 0x39, 0x8, 0x18},
|
SectionHeader{".symtab", SHT_SYMTAB, 0x0, 0x0, 0x19a0, 0x6f0, 0x24, 0x39, 0x8, 0x18},
|
||||||
@ -46,7 +46,7 @@ func (t *LineTable) parse(targetPC uint64, targetLine int) (b []byte, pc uint64,
|
||||||
case code <= 128:
|
case code <= 128:
|
||||||
line -= int(code-64);
|
line -= int(code-64);
|
||||||
default:
|
default:
|
||||||
pc += quantum * uint64(code-128);
|
pc += quantum*uint64(code-128);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
pc += quantum;
|
pc += quantum;
|
||||||
@ -515,7 +515,7 @@ func (ev *debugEvent) doTrap() (threadState, os.Error) {
|
||||||
return stopped, err;
|
return stopped, err;
|
||||||
}
|
}
|
||||||
|
|
||||||
b, ok := t.proc.breakpoints[uintptr(regs.PC()) - uintptr(len(bpinst386))];
|
b, ok := t.proc.breakpoints[uintptr(regs.PC())-uintptr(len(bpinst386))];
|
||||||
if !ok {
|
if !ok {
|
||||||
// We must have hit a breakpoint that was actually in
|
// We must have hit a breakpoint that was actually in
|
||||||
// the program. Leave the IP where it is so we don't
|
// the program. Leave the IP where it is so we don't
|
||||||
|
|
@ -1218,7 +1218,7 @@ func (p *process) attachAllThreads() os.Error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// There could have been a race, or
|
// There could have been a race, or
|
||||||
// this process could be a zobmie.
|
// this process could be a zobmie.
|
||||||
statFile, err2 := io.ReadFile(taskPath + "/" + tidStr + "/stat");
|
statFile, err2 := io.ReadFile(taskPath+"/"+tidStr+"/stat");
|
||||||
if err2 != nil {
|
if err2 != nil {
|
||||||
switch err2 := err2.(type) {
|
switch err2 := err2.(type) {
|
||||||
case *os.PathError:
|
case *os.PathError:
|
||||||
@@ -15,10 +15,10 @@ import (

 type parser struct {
 scanner.ErrorVector;
 scanner scanner.Scanner;
 pos token.Position; // token position
 tok token.Token; // one token look-ahead
 lit []byte; // token literal
 }

@@ -39,7 +39,7 @@ func (p *parser) errorExpected(pos token.Position, msg string) {
 // make the error message more specific
 msg += ", found '" + p.tok.String() + "'";
 if p.tok.IsLiteral() {
-msg += " " + string(p.lit);
+msg += " "+string(p.lit);
 }
 }
 p.Error(pos, msg);

@@ -51,7 +51,7 @@ func (p *parser) expect(tok token.Token) token.Position {
 if p.tok != tok {
 p.errorExpected(pos, "'" + tok.String() + "'");
 }
 p.next(); // make progress in any case
 return pos;
 }

@@ -185,7 +185,7 @@ func (p *parser) parse(filename string, src []byte) Grammar {
 // initialize parser
 p.ErrorVector.Init();
 p.scanner.Init(filename, src, p, 0);
 p.next(); // initializes pos, tok, lit

 grammar := make(Grammar);
 for p.tok != token.EOF {
@@ -396,7 +396,7 @@ func (f *allFlags) parseOne(index int) (ok bool, next int) {
 return false, index+1;
 }
 }
-name := s[num_minuses:len(s)];
+name := s[num_minuses : len(s)];
 if len(name) == 0 || name[0] == '-' || name[0] == '=' {
 fmt.Fprintln(os.Stderr, "bad flag syntax:", s);
 Usage();

@@ -439,7 +439,7 @@ func (f *allFlags) parseOne(index int) (ok bool, next int) {
 }
 } else {
 // It must have a value, which might be the next argument.
-if !has_value && index < len(os.Args) - 1 {
+if !has_value && index < len(os.Args)-1 {
 // value is the next arg
 has_value = true;
 index++;
@@ -164,7 +164,7 @@ func (p *pp) ensure(n int) {
 if len(p.buf) < n {
 newn := allocSize + len(p.buf);
 if newn < n {
-newn = n+allocSize;
+newn = n + allocSize;
 }
 b := make([]byte, newn);
 for i := 0; i < p.n; i++ {
@@ -203,7 +203,7 @@ func MergePackageFiles(pkg *Package) *File {
 ndecls := 0;
 for _, f := range pkg.Files {
 if f.Doc != nil {
-ncomments += len(f.Doc.List) + 1; // +1 for separator
+ncomments += len(f.Doc.List)+1; // +1 for separator
 }
 ndecls += len(f.Decls);
 }

@@ -215,7 +215,7 @@ func MergePackageFiles(pkg *Package) *File {
 // than drop them on the floor.
 var doc *CommentGroup;
 if ncomments > 0 {
-list := make([]*Comment, ncomments-1); // -1: no separator before first group
+list := make([]*Comment, ncomments - 1); // -1: no separator before first group
 i := 0;
 for _, f := range pkg.Files {
 if f.Doc != nil {
@@ -18,11 +18,11 @@ import (
 type typeDoc struct {
 // len(decl.Specs) == 1, and the element type is *ast.TypeSpec
 // if the type declaration hasn't been seen yet, decl is nil
 decl *ast.GenDecl;
 // values, factory functions, and methods associated with the type
 values *vector.Vector; // list of *ast.GenDecl (consts and vars)
-factories map[string] *ast.FuncDecl;
-methods map[string] *ast.FuncDecl;
+factories map[string]*ast.FuncDecl;
+methods map[string]*ast.FuncDecl;
 }

@@ -34,18 +34,18 @@ type typeDoc struct {
 // printing the corresponding AST node).
 //
 type docReader struct {
 doc *ast.CommentGroup; // package documentation, if any
 values *vector.Vector; // list of *ast.GenDecl (consts and vars)
-types map[string] *typeDoc;
-funcs map[string] *ast.FuncDecl;
+types map[string]*typeDoc;
+funcs map[string]*ast.FuncDecl;
 bugs *vector.Vector; // list of *ast.CommentGroup
 }

 func (doc *docReader) init() {
 doc.values = vector.New(0);
-doc.types = make(map[string] *typeDoc);
-doc.funcs = make(map[string] *ast.FuncDecl);
+doc.types = make(map[string]*typeDoc);
+doc.funcs = make(map[string]*ast.FuncDecl);
 doc.bugs = vector.New(0);
 }
@@ -65,13 +65,13 @@ func (doc *docReader) addType(decl *ast.GenDecl) {

 func (doc *docReader) lookupTypeDoc(name string) *typeDoc {
 if name == "" {
 return nil; // no type docs for anonymous types
 }
 if tdoc, found := doc.types[name]; found {
 return tdoc;
 }
 // type wasn't found - add one without declaration
-tdoc := &typeDoc{nil, vector.New(0), make(map[string] *ast.FuncDecl), make(map[string] *ast.FuncDecl)};
+tdoc := &typeDoc{nil, vector.New(0), make(map[string]*ast.FuncDecl), make(map[string]*ast.FuncDecl)};
 doc.types[name] = tdoc;
 return tdoc;
 }

@@ -136,7 +136,7 @@ func (doc *docReader) addValue(decl *ast.GenDecl) {
 // typed entries are sufficiently frequent
 typ := doc.lookupTypeDoc(domName);
 if typ != nil {
 values = typ.values; // associate with that type
 }
 }

@@ -207,7 +207,7 @@ func (doc *docReader) addDecl(decl ast.Decl) {
 // would lose GenDecl documentation if the TypeSpec
 // has documentation as well.
 doc.addType(&ast.GenDecl{d.Doc, d.Pos(), token.TYPE, noPos, []ast.Spec{spec}, noPos});
 // A new GenDecl node is created, no need to nil out d.Doc.
 }
 }
 }
@@ -228,8 +228,8 @@ func copyCommentList(list []*ast.Comment) []*ast.Comment {

 var (
 // Regexp constructor needs threads - cannot use init expressions
 bug_markers *regexp.Regexp;
 bug_content *regexp.Regexp;
 )

 func makeRex(s string) *regexp.Regexp {

@@ -245,8 +245,8 @@ func makeRex(s string) *regexp.Regexp {
 //
 func (doc *docReader) addFile(src *ast.File) {
 if bug_markers == nil {
 bug_markers = makeRex("^/[/*][ \t]*BUG\\(.*\\):[ \t]*"); // BUG(uid):
 bug_content = makeRex("[^ \n\r\t]+"); // at least one non-whitespace char
 }

 // add package documentation

@@ -257,7 +257,7 @@ func (doc *docReader) addFile(src *ast.File) {
 // comments correctly (but currently looses BUG(...)
 // comments).
 doc.doc = src.Doc;
 src.Doc = nil; // doc consumed - remove from ast.File node
 }

 // add all declarations
@@ -271,15 +271,15 @@ func (doc *docReader) addFile(src *ast.File) {
 cstr := string(text);
 if m := bug_markers.ExecuteString(cstr); len(m) > 0 {
 // found a BUG comment; maybe empty
-if bstr := cstr[m[1] : len(cstr)]; bug_content.MatchString(bstr) {
+if bstr := cstr[m[1]:len(cstr)]; bug_content.MatchString(bstr) {
 // non-empty BUG comment; collect comment without BUG prefix
 list := copyCommentList(c.List);
-list[0].Text = text[m[1] : len(text)];
+list[0].Text = text[m[1]:len(text)];
 doc.bugs.Push(&ast.CommentGroup{list, nil});
 }
 }
 }
 src.Comments = nil; // consumed unassociated comments - remove from ast.File node
 }
@@ -312,19 +312,24 @@ func NewPackageDoc(pkg *ast.Package, importpath string) *PackageDoc {
 // values, either vars or consts.
 //
 type ValueDoc struct {
 Doc string;
 Decl *ast.GenDecl;
 order int;
 }

 type sortValueDoc []*ValueDoc
-func (p sortValueDoc) Len() int { return len(p); }
-func (p sortValueDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
+func (p sortValueDoc) Len() int {
+return len(p);
+}
+func (p sortValueDoc) Swap(i, j int) {
+p[i], p[j] = p[j], p[i];
+}

 func declName(d *ast.GenDecl) string {
 if len(d.Specs) != 1 {
-return ""
+return "";
 }

 switch v := d.Specs[0].(type) {
@@ -350,17 +355,17 @@ func (p sortValueDoc) Less(i, j int) bool {


 func makeValueDocs(v *vector.Vector, tok token.Token) []*ValueDoc {
 d := make([]*ValueDoc, v.Len()); // big enough in any case
 n := 0;
 for i := range d {
 decl := v.At(i).(*ast.GenDecl);
 if decl.Tok == tok {
 d[n] = &ValueDoc{CommentText(decl.Doc), decl, i};
 n++;
 decl.Doc = nil; // doc consumed - removed from AST
 }
 }
-d = d[0 : n];
+d = d[0:n];
 sort.Sort(sortValueDoc(d));
 return d;
 }
@@ -370,25 +375,32 @@ func makeValueDocs(v *vector.Vector, tok token.Token) []*ValueDoc {
 // either a top-level function or a method function.
 //
 type FuncDoc struct {
 Doc string;
 Recv ast.Expr; // TODO(rsc): Would like string here
 Name string;
 Decl *ast.FuncDecl;
 }

 type sortFuncDoc []*FuncDoc
-func (p sortFuncDoc) Len() int { return len(p); }
-func (p sortFuncDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
-func (p sortFuncDoc) Less(i, j int) bool { return p[i].Name < p[j].Name; }
+func (p sortFuncDoc) Len() int {
+return len(p);
+}
+func (p sortFuncDoc) Swap(i, j int) {
+p[i], p[j] = p[j], p[i];
+}
+func (p sortFuncDoc) Less(i, j int) bool {
+return p[i].Name < p[j].Name;
+}

-func makeFuncDocs(m map[string] *ast.FuncDecl) []*FuncDoc {
+func makeFuncDocs(m map[string]*ast.FuncDecl) []*FuncDoc {
 d := make([]*FuncDoc, len(m));
 i := 0;
 for _, f := range m {
 doc := new(FuncDoc);
 doc.Doc = CommentText(f.Doc);
 f.Doc = nil; // doc consumed - remove from ast.FuncDecl node
 if f.Recv != nil {
 doc.Recv = f.Recv.Type;
 }
@@ -407,19 +419,24 @@ func makeFuncDocs(m map[string] *ast.FuncDecl) []*FuncDoc {
 // Factories is a sorted list of factory functions that return that type.
 // Methods is a sorted list of method functions on that type.
 type TypeDoc struct {
 Doc string;
 Type *ast.TypeSpec;
 Consts []*ValueDoc;
 Vars []*ValueDoc;
 Factories []*FuncDoc;
 Methods []*FuncDoc;
 Decl *ast.GenDecl;
 order int;
 }

 type sortTypeDoc []*TypeDoc
-func (p sortTypeDoc) Len() int { return len(p); }
-func (p sortTypeDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
+func (p sortTypeDoc) Len() int {
+return len(p);
+}
+func (p sortTypeDoc) Swap(i, j int) {
+p[i], p[j] = p[j], p[i];
+}
 func (p sortTypeDoc) Less(i, j int) bool {
 // sort by name
 // pull blocks (name = "") up to top
@@ -434,7 +451,7 @@ func (p sortTypeDoc) Less(i, j int) bool {
 // NOTE(rsc): This would appear not to be correct for type ( )
 // blocks, but the doc extractor above has split them into
 // individual declarations.
-func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
+func (doc *docReader) makeTypeDocs(m map[string]*typeDoc) []*TypeDoc {
 d := make([]*TypeDoc, len(m));
 i := 0;
 for _, old := range m {

@@ -445,12 +462,12 @@ func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
 typespec := decl.Specs[0].(*ast.TypeSpec);
 t := new(TypeDoc);
 doc := typespec.Doc;
 typespec.Doc = nil; // doc consumed - remove from ast.TypeSpec node
 if doc == nil {
 // no doc associated with the spec, use the declaration doc, if any
 doc = decl.Doc;
 }
 decl.Doc = nil; // doc consumed - remove from ast.Decl node
 t.Doc = CommentText(doc);
 t.Type = typespec;
 t.Consts = makeValueDocs(old.values, token.CONST);

@@ -482,7 +499,7 @@ func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
 }
 }
 }
-d = d[0 : i]; // some types may have been ignored
+d = d[0:i]; // some types may have been ignored
 sort.Sort(sortTypeDoc(d));
 return d;
 }
@@ -500,16 +517,16 @@ func makeBugDocs(v *vector.Vector) []string {
 // PackageDoc is the documentation for an entire package.
 //
 type PackageDoc struct {
 PackageName string;
 ImportPath string;
 FilePath string;
 Filenames []string;
 Doc string;
 Consts []*ValueDoc;
 Types []*TypeDoc;
 Vars []*ValueDoc;
 Funcs []*FuncDoc;
 Bugs []string;
 }
@@ -544,11 +561,11 @@ func isRegexp(s string) bool {
 for _, c := range s {
 for _, m := range metachars {
 if c == m {
-return true
+return true;
 }
 }
 }
-return false
+return false;
 }
@@ -594,7 +611,7 @@ func filterValueDocs(a []*ValueDoc, names []string) []*ValueDoc {
 w++;
 }
 }
-return a[0 : w];
+return a[0:w];
 }

@@ -606,7 +623,7 @@ func filterFuncDocs(a []*FuncDoc, names []string) []*FuncDoc {
 w++;
 }
 }
-return a[0 : w];
+return a[0:w];
 }

@@ -627,7 +644,7 @@ func filterTypeDocs(a []*TypeDoc, names []string) []*TypeDoc {
 w++;
 }
 }
-return a[0 : w];
+return a[0:w];
 }

@@ -643,4 +660,3 @@ func (p *PackageDoc) Filter(names []string) {
 p.Funcs = filterFuncDocs(p.Funcs, names);
 p.Doc = ""; // don't show top-level package doc
 }
@@ -23,7 +23,7 @@ type encoderState struct {
 b *bytes.Buffer;
 err os.Error; // error encountered during encoding;
 fieldnum int; // the last field number written.
-buf [1+uint64Size]byte; // buffer used by the encoder; here to avoid allocation.
+buf [1 + uint64Size]byte; // buffer used by the encoder; here to avoid allocation.
 }

 // Unsigned integers have a two-state encoding. If the number is less

@@ -49,7 +49,7 @@ func encodeUint(state *encoderState, x uint64) {
 m--;
 }
 state.buf[m] = uint8(-(n-1));
-n, state.err = state.b.Write(state.buf[m : uint64Size+1]);
+n, state.err = state.b.Write(state.buf[m : uint64Size + 1]);
 }

 // encodeInt writes an encoded signed integer to state.w.

@@ -58,7 +58,7 @@ func encodeUint(state *encoderState, x uint64) {
 func encodeInt(state *encoderState, i int64) {
 var x uint64;
 if i < 0 {
-x = uint64(^i << 1) | 1;
+x = uint64(^i << 1)|1;
 } else {
 x = uint64(i<<1);
 }
@@ -49,7 +49,7 @@ func (r *Response) AddHeader(key, value string) {

 oldValues, oldValuesPresent := r.Header[key];
 if oldValuesPresent {
-r.Header[key] = oldValues+","+value;
+r.Header[key] = oldValues + "," + value;
 } else {
 r.Header[key] = value;
 }
@@ -21,25 +21,26 @@ import (
 )

 const (
 maxLineLength = 1024; // assumed < bufio.DefaultBufSize
 maxValueLength = 1024;
 maxHeaderLines = 1024;
-chunkSize = 4 << 10; // 4 KB chunks
+chunkSize = 4<<10; // 4 KB chunks
 )

 // HTTP request parsing errors.
 type ProtocolError struct {
-os.ErrorString
+os.ErrorString;
 }

 var (
 ErrLineTooLong = &ProtocolError{"header line too long"};
 ErrHeaderTooLong = &ProtocolError{"header too long"};
 ErrShortBody = &ProtocolError{"entity body too short"};
 )

 type badStringError struct {
 what string;
 str string;
 }

 func (e *badStringError) String() string {
@@ -48,12 +49,12 @@ func (e *badStringError) String() string {

 // A Request represents a parsed HTTP request header.
 type Request struct {
 Method string; // GET, POST, PUT, etc.
 RawUrl string; // The raw URL given in the request.
 Url *URL; // Parsed URL.
 Proto string; // "HTTP/1.0"
 ProtoMajor int; // 1
 ProtoMinor int; // 0

 // A header mapping request lines to their values.
 // If the header says

@@ -74,18 +75,18 @@ type Request struct {
 // The request parser implements this by canonicalizing the
 // name, making the first character and any characters
 // following a hyphen uppercase and the rest lowercase.
-Header map[string] string;
+Header map[string]string;

 // The message body.
 Body io.Reader;

 // Whether to close the connection after replying to this request.
 Close bool;

 // The host on which the URL is sought.
 // Per RFC 2616, this is either the value of the Host: header
 // or the host name given in the URL itself.
 Host string;

 // The referring URL, if sent in the request.
 //
@@ -97,21 +98,20 @@ type Request struct {
 // can diagnose programs that use the alternate
 // (correct English) spelling req.Referrer but cannot
 // diagnose programs that use Header["Referrer"].
 Referer string;

 // The User-Agent: header string, if sent in the request.
 UserAgent string;

 // The parsed form. Only available after ParseForm is called.
-Form map[string] []string;
+Form map[string][]string;

 }

 // ProtoAtLeast returns whether the HTTP protocol used
 // in the request is at least major.minor.
 func (r *Request) ProtoAtLeast(major, minor int) bool {
 return r.ProtoMajor > major ||
-r.ProtoMajor == major && r.ProtoMinor >= minor
+r.ProtoMajor == major && r.ProtoMinor >= minor;
 }

 // Return value if nonempty, def otherwise.
@@ -123,7 +123,7 @@ func valueOrDefault(value, def string) string {
 }

 // TODO(rsc): Change default UserAgent before open-source release.
-const defaultUserAgent = "http.Client";
+const defaultUserAgent = "http.Client"

 // Write an HTTP/1.1 request -- header and body -- in wire format.
 // This method consults the following fields of req:

@@ -162,7 +162,7 @@ func (req *Request) write(w io.Writer) os.Error {
 // Response.{GetHeader,AddHeader} and string constants for "Host",
 // "User-Agent" and "Referer".
 for k, v := range req.Header {
-io.WriteString(w, k + ": " + v + "\r\n");
+io.WriteString(w, k+": "+v+"\r\n");
 }

 io.WriteString(w, "\r\n");

@@ -183,7 +183,7 @@ func (req *Request) write(w io.Writer) os.Error {
 switch {
 case er != nil:
 if er == os.EOF {
-break Loop
+break Loop;
 }
 return er;
 case ew != nil:
@@ -210,29 +210,29 @@ func readLineBytes(b *bufio.Reader) (p []byte, err os.Error) {
 if err == os.EOF {
 err = io.ErrUnexpectedEOF;
 }
-return nil, err
+return nil, err;
 }
 if len(p) >= maxLineLength {
-return nil, ErrLineTooLong
+return nil, ErrLineTooLong;
 }

 // Chop off trailing white space.
 var i int;
 for i = len(p); i > 0; i-- {
 if c := p[i-1]; c != ' ' && c != '\r' && c != '\t' && c != '\n' {
-break
+break;
 }
 }
-return p[0:i], nil
+return p[0:i], nil;
 }

 // readLineBytes, but convert the bytes into a string.
 func readLine(b *bufio.Reader) (s string, err os.Error) {
 p, e := readLineBytes(b);
 if e != nil {
-return "", e
+return "", e;
 }
-return string(p), nil
+return string(p), nil;
 }

 var colon = []byte{':'}
@@ -244,10 +244,10 @@ var colon = []byte{':'}
 func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
 line, e := readLineBytes(b);
 if e != nil {
-return "", "", e
+return "", "", e;
 }
 if len(line) == 0 {
-return "", "", nil
+return "", "", nil;
 }

 // Scan first line for colon.

@@ -265,7 +265,7 @@ func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
 // Skip initial space before value.
 for i++; i < len(line); i++ {
 if line[i] != ' ' {
-break
+break;
 }
 }
 value = string(line[i:len(line)]);
@@ -286,16 +286,16 @@ func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
 if e == os.EOF {
 e = io.ErrUnexpectedEOF;
 }
-return "", "", e
+return "", "", e;
 }
 }
 b.UnreadByte();

 // Read the rest of the line and add to value.
 if line, e = readLineBytes(b); e != nil {
-return "", "", e
+return "", "", e;
 }
-value += " " + string(line);
+value += " "+string(line);

 if len(value) >= maxValueLength {
 return "", "", &badStringError{"value too long for key", key};
@@ -313,33 +313,33 @@ Malformed:
 func atoi(s string, i int) (n, i1 int, ok bool) {
 const Big = 1000000;
 if i >= len(s) || s[i] < '0' || s[i] > '9' {
-return 0, 0, false
+return 0, 0, false;
 }
 n = 0;
 for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ {
 n = n*10 + int(s[i]-'0');
 if n > Big {
-return 0, 0, false
+return 0, 0, false;
 }
 }
-return n, i, true
+return n, i, true;
 }

 // Parse HTTP version: "HTTP/1.2" -> (1, 2, true).
 func parseHTTPVersion(vers string) (int, int, bool) {
 if vers[0:5] != "HTTP/" {
-return 0, 0, false
+return 0, 0, false;
 }
 major, i, ok := atoi(vers, 5);
 if !ok || i >= len(vers) || vers[i] != '.' {
-return 0, 0, false
+return 0, 0, false;
 }
 var minor int;
 minor, i, ok = atoi(vers, i+1);
 if !ok || i != len(vers) {
-return 0, 0, false
+return 0, 0, false;
 }
-return major, minor, true
+return major, minor, true;
 }

 var cmap = make(map[string]string)
@@ -360,12 +360,12 @@ func CanonicalHeaderKey(s string) string {
 // HTTP headers are ASCII only, so no Unicode issues.
 a := strings.Bytes(s);
 upper := true;
-for i,v := range a {
+for i, v := range a {
 if upper && 'a' <= v && v <= 'z' {
-a[i] = v + 'A' - 'a';
+a[i] = v+'A'-'a';
 }
 if !upper && 'A' <= v && v <= 'Z' {
-a[i] = v + 'a' - 'A';
+a[i] = v+'a'-'A';
 }
 upper = false;
 if v == '-' {

@@ -378,13 +378,13 @@ func CanonicalHeaderKey(s string) string {
 }

 type chunkedReader struct {
 r *bufio.Reader;
 n uint64; // unread bytes in chunk
 err os.Error;
 }

 func newChunkedReader(r *bufio.Reader) *chunkedReader {
-return &chunkedReader{ r: r }
+return &chunkedReader{r: r};
 }

 func (cr *chunkedReader) beginChunk() {
@@ -392,21 +392,21 @@ func (cr *chunkedReader) beginChunk() {
 var line string;
 line, cr.err = readLine(cr.r);
 if cr.err != nil {
-return
+return;
 }
 cr.n, cr.err = strconv.Btoui64(line, 16);
 if cr.err != nil {
-return
+return;
 }
 if cr.n == 0 {
 // trailer CRLF
 for {
 line, cr.err = readLine(cr.r);
 if cr.err != nil {
-return
+return;
 }
 if line == "" {
-break
+break;
 }
 }
 cr.err = os.EOF;

@@ -415,16 +415,16 @@ func (cr *chunkedReader) beginChunk() {

 func (cr *chunkedReader) Read(b []uint8) (n int, err os.Error) {
 if cr.err != nil {
-return 0, cr.err
+return 0, cr.err;
 }
 if cr.n == 0 {
 cr.beginChunk();
 if cr.err != nil {
-return 0, cr.err
+return 0, cr.err;
 }
 }
 if uint64(len(b)) > cr.n {
-b = b[0:cr.n];
+b = b[0 : cr.n];
 }
 n, cr.err = cr.r.Read(b);
 cr.n -= uint64(n);
@@ -437,7 +437,7 @@ func (cr *chunkedReader) Read(b []uint8) (n int, err os.Error) {
 }
 }
 }
-return n, cr.err
+return n, cr.err;
 }

 // ReadRequest reads and parses a request from b.

@@ -447,7 +447,7 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 // First line: GET /index.html HTTP/1.0
 var s string;
 if s, err = readLine(b); err != nil {
-return nil, err
+return nil, err;
 }

 var f []string;
@@ -461,22 +461,22 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 }

 if req.Url, err = ParseURL(req.RawUrl); err != nil {
-return nil, err
+return nil, err;
 }

 // Subsequent lines: Key: value.
 nheader := 0;
-req.Header = make(map[string] string);
+req.Header = make(map[string]string);
 for {
 var key, value string;
 if key, value, err = readKeyValue(b); err != nil {
-return nil, err
+return nil, err;
 }
 if key == "" {
-break
+break;
 }
 if nheader++; nheader >= maxHeaderLines {
-return nil, ErrHeaderTooLong
+return nil, ErrHeaderTooLong;
 }

 key = CanonicalHeaderKey(key);

@@ -486,9 +486,9 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 // to concatenating the values separated by commas.
 oldvalue, present := req.Header[key];
 if present {
-req.Header[key] = oldvalue+","+value
+req.Header[key] = oldvalue+","+value;
 } else {
-req.Header[key] = value
+req.Header[key] = value;
 }
 }
@@ -500,7 +500,7 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 // Host: doesntmatter
 // the same. In the second case, any Host line is ignored.
 if v, present := req.Header["Host"]; present && req.Url.Host == "" {
-req.Host = v
+req.Host = v;
 }

 // RFC2616: Should treat

@@ -509,27 +509,27 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 // Cache-Control: no-cache
 if v, present := req.Header["Pragma"]; present && v == "no-cache" {
 if _, presentcc := req.Header["Cache-Control"]; !presentcc {
-req.Header["Cache-Control"] = "no-cache"
+req.Header["Cache-Control"] = "no-cache";
 }
 }

 // Determine whether to hang up after sending the reply.
 if req.ProtoMajor < 1 || (req.ProtoMajor == 1 && req.ProtoMinor < 1) {
-req.Close = true
+req.Close = true;
 } else if v, present := req.Header["Connection"]; present {
 // TODO: Should split on commas, toss surrounding white space,
 // and check each field.
 if v == "close" {
-req.Close = true
+req.Close = true;
 }
 }

 // Pull out useful fields as a convenience to clients.
 if v, present := req.Header["Referer"]; present {
-req.Referer = v
+req.Referer = v;
 }
 if v, present := req.Header["User-Agent"]; present {
-req.UserAgent = v
+req.UserAgent = v;
 }

 // TODO: Parse specific header values:
@@ -571,16 +571,16 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
 raw := make([]byte, length);
 n, err := b.Read(raw);
 if err != nil || uint64(n) < length {
-return nil, ErrShortBody
+return nil, ErrShortBody;
 }
 req.Body = bytes.NewBuffer(raw);
 }

-return req, nil
+return req, nil;
 }

-func parseForm(query string) (m map[string] []string, err os.Error) {
-data := make(map[string] *vector.StringVector);
+func parseForm(query string) (m map[string][]string, err os.Error) {
+data := make(map[string]*vector.StringVector);
 for _, kv := range strings.Split(query, "&", 0) {
 kvPair := strings.Split(kv, "=", 2);

@@ -602,19 +602,19 @@ func parseForm(query string) (m map[string] []string, err os.Error) {
 vec.Push(value);
 }

-m = make(map[string] []string);
+m = make(map[string][]string);
 for k, vec := range data {
 m[k] = vec.Data();
 }

-return
+return;
 }

 // ParseForm parses the request body as a form for POST requests, or the raw query for GET requests.
 // It is idempotent.
 func (r *Request) ParseForm() (err os.Error) {
 if r.Form != nil {
-return
+return;
 }

 var query string;
@@ -624,23 +624,23 @@ func (r *Request) ParseForm() (err os.Error) {
 query = r.Url.RawQuery;
 case "POST":
 if r.Body == nil {
-return os.ErrorString("missing form body")
+return os.ErrorString("missing form body");
 }
 ct, _ := r.Header["Content-Type"];
 switch strings.Split(ct, ";", 2)[0] {
 case "text/plain", "application/x-www-form-urlencoded", "":
 var b []byte;
 if b, err = io.ReadAll(r.Body); err != nil {
-return
+return;
 }
 query = string(b);
 // TODO(dsymonds): Handle multipart/form-data
 default:
-return &badStringError{"unknown Content-Type", ct}
+return &badStringError{"unknown Content-Type", ct};
 }
 }
 r.Form, err = parseForm(query);
-return
+return;
 }

 // FormValue returns the first value for the named component of the query.
@@ -650,7 +650,7 @@ func (r *Request) FormValue(key string) string {
 r.ParseForm();
 }
 if vs, ok := r.Form[key]; ok && len(vs) > 0 {
-return vs[0]
+return vs[0];
 }
-return ""
+return "";
 }
@@ -146,7 +146,7 @@ func (c *Conn) WriteHeader(code int) {
 if !ok {
 text = "status code " + codestring;
 }
-io.WriteString(c.buf, proto+" "+codestring+" "+text+"\r\n");
+io.WriteString(c.buf, proto + " " + codestring + " " + text + "\r\n");
 for k, v := range c.header {
 io.WriteString(c.buf, k+": "+v+"\r\n");
 }
@@ -203,7 +203,7 @@ func (d *decoder) idatReader(idat io.Reader) os.Error {
 case ctPaletted:
 bpp = 1;
 paletted = d.image.(*image.Paletted);
-maxPalette = uint8(len(paletted.Palette) - 1);
+maxPalette = uint8(len(paletted.Palette)-1);
 case ctTrueColorAlpha:
 bpp = 4;
 nrgba = d.image.(*image.NRGBA);
@@ -54,7 +54,7 @@ func (p *pipe) Read(data []byte) (n int, err os.Error) {
 data[i] = p.wpend[i];
 }
 p.wtot += n;
-p.wpend = p.wpend[n : len(p.wpend)];
+p.wpend = p.wpend[n:len(p.wpend)];

 // If write block is done, finish the write.
 if len(p.wpend) == 0 {
@@ -75,7 +75,7 @@ func itoa(i int, wid int) string {
 for ; u > 0 || wid > 0; u /= 10 {
 bp--;
 wid--;
-b[bp] = byte(u%10) + '0';
+b[bp] = byte(u%10)+'0';
 }

 return string(b[bp:len(b)]);

@@ -91,12 +91,12 @@ func (l *Logger) formatHeader(ns int64, calldepth int) string {
 if l.flag & (Ltime | Lmicroseconds) != 0 {
 h += itoa(t.Hour, 2) + ":" + itoa(t.Minute, 2) + ":" + itoa(t.Second, 2);
 if l.flag & Lmicroseconds != 0 {
-h += "." + itoa(int(ns%1e9) / 1e3, 6);
+h += "." + itoa(int(ns%1e9)/1e3, 6);
 }
 h += " ";
 }
 }
-if l.flag & (Lshortfile|Llongfile) != 0 {
+if l.flag & (Lshortfile | Llongfile) != 0 {
 _, file, line, ok := runtime.Caller(calldepth);
 if ok {
 if l.flag & Lshortfile != 0 {

@@ -131,7 +131,7 @@ func (l *Logger) Output(calldepth int, s string) {
 if len(s) > 0 && s[len(s)-1] == '\n' {
 newline = "";
 }
-s = l.formatHeader(now, calldepth+1) + s + newline;
+s = l.formatHeader(now, calldepth + 1) + s + newline;
 io.WriteString(l.out0, s);
 if l.out1 != nil {
 io.WriteString(l.out1, s);
@@ -36,9 +36,9 @@ var tests = []tester{
 tester{Lok|Ltime, "", Rtime+" "},
 tester{Lok | Ltime | Lmicroseconds, "", Rtime + Rmicroseconds + " "},
 tester{Lok | Lmicroseconds, "", Rtime + Rmicroseconds + " "}, // microsec implies time
-tester{Lok|Llongfile, "", Rlongfile+" "},
-tester{Lok|Lshortfile, "", Rshortfile+" "},
-tester{Lok|Llongfile|Lshortfile, "", Rshortfile+" "}, // shortfile overrides longfile
+tester{Lok | Llongfile, "", Rlongfile + " "},
+tester{Lok | Lshortfile, "", Rshortfile + " "},
+tester{Lok | Llongfile | Lshortfile, "", Rshortfile + " "}, // shortfile overrides longfile
 // everything at once:
 tester{Lok | Ldate | Ltime | Lmicroseconds | Llongfile, "XXX", "XXX" + Rdate + " " + Rtime + Rmicroseconds + " " + Rlongfile + " "},
 tester{Lok | Ldate | Ltime | Lmicroseconds | Lshortfile, "XXX", "XXX" + Rdate + " " + Rtime + Rmicroseconds + " " + Rshortfile + " "},
@@ -18,13 +18,13 @@ var pow10tab [70]float64
 // Pow10 returns 10**x, the base-10 exponential of x.
 func Pow10(e int) float64 {
 if e < 0 {
-return 1 / Pow10(-e);
+return 1/Pow10(-e);
 }
 if e < len(pow10tab) {
 return pow10tab[e];
 }
 m := e/2;
-return Pow10(m) * Pow10(e-m);
+return Pow10(m)*Pow10(e-m);
 }

 func init() {

@@ -42,7 +42,7 @@ func Sinh(x float64) float64 {
 temp = Exp(x)/2;

 case x > 0.5:
-temp = (Exp(x) - Exp(-x))/2;
+temp = (Exp(x)-Exp(-x))/2;

 default:
 sq := x*x;

@@ -64,5 +64,5 @@ func Cosh(x float64) float64 {
 if x > 21 {
 return Exp(x)/2;
 }
-return (Exp(x) + Exp(-x))/2;
+return (Exp(x)+Exp(-x))/2;
 }
@@ -43,18 +43,18 @@ func Sqrt(x float64) float64 {
 temp := 0.5*(1+y);

 for exp > 60 {
-temp = temp * float64(1<<30);
+temp = temp*float64(1<<30);
 exp = exp-60;
 }
 for exp < -60 {
-temp = temp / float64(1<<30);
+temp = temp/float64(1<<30);
 exp = exp+60;
 }
 if exp >= 0 {
-exp = 1 << uint(exp/2);
+exp = 1<<uint(exp/2);
 temp = temp*float64(exp);
 } else {
-exp = 1 << uint(-exp / 2);
+exp = 1<<uint(-exp / 2);
 temp = temp/float64(exp);
 }
@@ -111,7 +111,7 @@ func itod(i int) string {
 bp := len(b);
 for ; u > 0; u /= 10 {
 bp--;
-b[bp] = byte(u%10) + '0';
+b[bp] = byte(u%10)+'0';
 }

 if i < 0 {
@@ -25,7 +25,7 @@ func TestMkdirAll(t *testing.T) {
 }

 // Make file.
-fpath := path + "/file";
+fpath := path+"/file";
 _, err = Open(fpath, O_WRONLY|O_CREAT, 0666);
 if err != nil {
 t.Fatalf("create %q: %s", fpath, err);

@@ -64,7 +64,7 @@ func TestMkdirAll(t *testing.T) {
 func TestRemoveAll(t *testing.T) {
 // Work directory.
 path := "_obj/_TestRemoveAll_";
-fpath := path + "/file";
+fpath := path+"/file";
 dpath := path+"/dir";

 // Make directory with 1 file and remove.

@@ -92,7 +92,7 @@ func TestRemoveAll(t *testing.T) {
 t.Fatalf("create %q: %s", fpath, err);
 }
 fd.Close();
-fd, err = Open(dpath + "/file", O_WRONLY|O_CREAT, 0666);
+fd, err = Open(dpath+"/file", O_WRONLY|O_CREAT, 0666);
 if err != nil {
 t.Fatalf("create %q: %s", fpath, err);
 }

@@ -109,7 +109,7 @@ func TestRemoveAll(t *testing.T) {
 t.Fatalf("MkdirAll %q: %s", dpath, err);
 }

-for _, s := range []string{fpath, dpath + "/file1", path+"/zzz"} {
+for _, s := range []string{fpath, dpath+"/file1", path+"/zzz"} {
 fd, err = Open(s, O_WRONLY|O_CREAT, 0666);
 if err != nil {
 t.Fatalf("create %q: %s", s, err);
@@ -27,9 +27,9 @@ func dirFromStat(name string, dir *Dir, lstat, stat *syscall.Stat_t) *Dir {
 dir.Size = uint64(stat.Size);
 dir.Blksize = uint64(stat.Blksize);
 dir.Blocks = uint64(stat.Blocks);
-dir.Atime_ns = uint64(stat.Atime) * 1e9;
-dir.Mtime_ns = uint64(stat.Mtime) * 1e9;
-dir.Ctime_ns = uint64(stat.Ctime) * 1e9;
+dir.Atime_ns = uint64(stat.Atime)*1e9;
+dir.Mtime_ns = uint64(stat.Mtime)*1e9;
+dir.Ctime_ns = uint64(stat.Ctime)*1e9;
 for i := len(name)-1; i >= 0; i-- {
 if name[i] == '/' {
 name = name[i+1 : len(name)];

@@ -16,5 +16,5 @@ func Time() (sec int64, nsec int64, err Error) {
 if errno := syscall.Gettimeofday(&tv); errno != 0 {
 return 0, 0, NewSyscallError("gettimeofday", errno);
 }
-return int64(tv.Sec), int64(tv.Usec) * 1000, err;
+return int64(tv.Sec), int64(tv.Usec)*1000, err;
 }
@@ -27,7 +27,7 @@ import "strings"
 // http://plan9.bell-labs.com/sys/doc/lexnames.html
 func Clean(path string) string {
 if path == "" {
-return "."
+return ".";
 }

 rooted := path[0] == '/';

@@ -105,10 +105,10 @@ func Clean(path string) string {
 func Split(path string) (dir, file string) {
 for i := len(path)-1; i >= 0; i-- {
 if path[i] == '/' {
-return path[0:i+1], path[i+1:len(path)];
+return path[0 : i+1], path[i+1 : len(path)];
 }
 }
-return "", path
+return "", path;
 }

 // Join joins dir and file into a single path, adding a separating

@@ -117,7 +117,7 @@ func Join(dir, file string) string {
 if dir == "" {
 return file;
 }
-return Clean(dir + "/" + file);
+return Clean(dir+"/"+file);
 }

 // Ext returns the file name extension used by path.
@ -130,6 +130,5 @@ func Ext(path string) string {
|
||||||
return path[i:len(path)];
|
return path[i:len(path)];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return ""
|
return "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,14 +5,14 @@
|
||||||
package path
|
package path
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"testing"
|
"testing";
|
||||||
)
|
)
|
||||||
|
|
||||||
type CleanTest struct {
|
type CleanTest struct {
|
||||||
path, clean string
|
path, clean string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var cleantests = []CleanTest {
|
var cleantests = []CleanTest{
|
||||||
// Already clean
|
// Already clean
|
||||||
CleanTest{"", "."},
|
CleanTest{"", "."},
|
||||||
CleanTest{"abc", "abc"},
|
CleanTest{"abc", "abc"},
|
||||||
|
|
@ -71,10 +71,10 @@ func TestClean(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type SplitTest struct {
|
type SplitTest struct {
|
||||||
path, dir, file string
|
path, dir, file string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var splittests = []SplitTest {
|
var splittests = []SplitTest{
|
||||||
SplitTest{"a/b", "a/", "b"},
|
SplitTest{"a/b", "a/", "b"},
|
||||||
SplitTest{"a/b/", "a/b/", ""},
|
SplitTest{"a/b/", "a/b/", ""},
|
||||||
SplitTest{"a/", "a/", ""},
|
SplitTest{"a/", "a/", ""},
|
||||||
|
|
@ -91,10 +91,10 @@ func TestSplit(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type JoinTest struct {
|
type JoinTest struct {
|
||||||
dir, file, path string
|
dir, file, path string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var jointests = []JoinTest {
|
var jointests = []JoinTest{
|
||||||
JoinTest{"a", "b", "a/b"},
|
JoinTest{"a", "b", "a/b"},
|
||||||
JoinTest{"a", "", "a"},
|
JoinTest{"a", "", "a"},
|
||||||
JoinTest{"", "b", "b"},
|
JoinTest{"", "b", "b"},
|
||||||
|
|
@ -113,10 +113,10 @@ func TestJoin(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type ExtTest struct {
|
type ExtTest struct {
|
||||||
path, ext string
|
path, ext string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var exttests = []ExtTest {
|
var exttests = []ExtTest{
|
||||||
ExtTest{"path.go", ".go"},
|
ExtTest{"path.go", ".go"},
|
||||||
ExtTest{"path.pb.go", ".go"},
|
ExtTest{"path.pb.go", ".go"},
|
||||||
ExtTest{"a.dir/b", ""},
|
ExtTest{"a.dir/b", ""},
|
||||||
|
|
@ -131,4 +131,3 @@ func TestExt(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -470,7 +470,7 @@ func TestAllMatches(t *testing.T) {
|
||||||
|
|
||||||
switch c.matchfunc {
|
switch c.matchfunc {
|
||||||
case "matchit":
|
case "matchit":
|
||||||
result = make([]string, len(c.input) + 1);
|
result = make([]string, len(c.input)+1);
|
||||||
i := 0;
|
i := 0;
|
||||||
b := strings.Bytes(c.input);
|
b := strings.Bytes(c.input);
|
||||||
for match := range re.AllMatchesIter(b, c.n) {
|
for match := range re.AllMatchesIter(b, c.n) {
|
||||||
|
|
@ -479,7 +479,7 @@ func TestAllMatches(t *testing.T) {
|
||||||
}
|
}
|
||||||
result = result[0:i];
|
result = result[0:i];
|
||||||
case "stringmatchit":
|
case "stringmatchit":
|
||||||
result = make([]string, len(c.input) + 1);
|
result = make([]string, len(c.input)+1);
|
||||||
i := 0;
|
i := 0;
|
||||||
for match := range re.AllMatchesStringIter(c.input, c.n) {
|
for match := range re.AllMatchesStringIter(c.input, c.n) {
|
||||||
result[i] = match;
|
result[i] = match;
|
||||||
|
|
@ -487,7 +487,7 @@ func TestAllMatches(t *testing.T) {
|
||||||
}
|
}
|
||||||
result = result[0:i];
|
result = result[0:i];
|
||||||
case "match":
|
case "match":
|
||||||
result = make([]string, len(c.input) + 1);
|
result = make([]string, len(c.input)+1);
|
||||||
b := strings.Bytes(c.input);
|
b := strings.Bytes(c.input);
|
||||||
i := 0;
|
i := 0;
|
||||||
for _, match := range re.AllMatches(b, c.n) {
|
for _, match := range re.AllMatches(b, c.n) {
|
||||||
|
|
|
||||||
|
|
@ -265,11 +265,11 @@ func decimalAtof64(neg bool, d *decimal, trunc bool) (f float64, ok bool) {
|
||||||
f *= float64pow10[k-22];
|
f *= float64pow10[k-22];
|
||||||
k = 22;
|
k = 22;
|
||||||
}
|
}
|
||||||
return f*float64pow10[k], true;
|
return f * float64pow10[k], true;
|
||||||
|
|
||||||
case d.dp < d.nd && d.nd - d.dp <= 22: // int / 10^k
|
case d.dp < d.nd && d.nd - d.dp <= 22: // int / 10^k
|
||||||
f := decimalAtof64Int(neg, d);
|
f := decimalAtof64Int(neg, d);
|
||||||
return f/float64pow10[d.nd - d.dp], true;
|
return f / float64pow10[d.nd - d.dp], true;
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
@ -296,11 +296,11 @@ func decimalAtof32(neg bool, d *decimal, trunc bool) (f float32, ok bool) {
|
||||||
f *= float32pow10[k-10];
|
f *= float32pow10[k-10];
|
||||||
k = 10;
|
k = 10;
|
||||||
}
|
}
|
||||||
return f*float32pow10[k], true;
|
return f * float32pow10[k], true;
|
||||||
|
|
||||||
case d.dp < d.nd && d.nd - d.dp <= 10: // int / 10^k
|
case d.dp < d.nd && d.nd - d.dp <= 10: // int / 10^k
|
||||||
f := decimalAtof32Int(neg, d);
|
f := decimalAtof32Int(neg, d);
|
||||||
return f/float32pow10[d.nd - d.dp], true;
|
return f / float32pow10[d.nd - d.dp], true;
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,7 @@ var shifttests = []shiftTest{
|
||||||
shiftTest{1, 100, "1267650600228229401496703205376"},
|
shiftTest{1, 100, "1267650600228229401496703205376"},
|
||||||
shiftTest{1, -100,
|
shiftTest{1, -100,
|
||||||
"0.00000000000000000000000000000078886090522101180541"
|
"0.00000000000000000000000000000078886090522101180541"
|
||||||
"17285652827862296732064351090230047702789306640625",
|
"17285652827862296732064351090230047702789306640625",
|
||||||
},
|
},
|
||||||
shiftTest{12345678, 8, "3160493568"},
|
shiftTest{12345678, 8, "3160493568"},
|
||||||
shiftTest{12345678, -8, "48225.3046875"},
|
shiftTest{12345678, -8, "48225.3046875"},
|
||||||
|
|
|
||||||
|
|
@ -16,13 +16,13 @@ import (
|
||||||
func pow2(i int) float64 {
|
func pow2(i int) float64 {
|
||||||
switch {
|
switch {
|
||||||
case i < 0:
|
case i < 0:
|
||||||
return 1 / pow2(-i);
|
return 1/pow2(-i);
|
||||||
case i == 0:
|
case i == 0:
|
||||||
return 1;
|
return 1;
|
||||||
case i == 1:
|
case i == 1:
|
||||||
return 2;
|
return 2;
|
||||||
}
|
}
|
||||||
return pow2(i/2) * pow2(i - i/2);
|
return pow2(i/2)*pow2(i - i/2);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Wrapper around strconv.Atof64. Handles dddddp+ddd (binary exponent)
|
// Wrapper around strconv.Atof64. Handles dddddp+ddd (binary exponent)
|
||||||
|
|
|
||||||
|
|
@ -14,12 +14,13 @@ import "math"
|
||||||
|
|
||||||
// TODO: move elsewhere?
|
// TODO: move elsewhere?
|
||||||
type floatInfo struct {
|
type floatInfo struct {
|
||||||
mantbits uint;
|
mantbits uint;
|
||||||
expbits uint;
|
expbits uint;
|
||||||
bias int;
|
bias int;
|
||||||
}
|
}
|
||||||
var float32info = floatInfo{ 23, 8, -127 }
|
|
||||||
var float64info = floatInfo{ 52, 11, -1023 }
|
var float32info = floatInfo{23, 8, -127}
|
||||||
|
var float64info = floatInfo{52, 11, -1023}
|
||||||
|
|
||||||
func floatsize() int {
|
func floatsize() int {
|
||||||
// Figure out whether float is float32 or float64.
|
// Figure out whether float is float32 or float64.
|
||||||
|
|
@ -72,12 +73,12 @@ func Ftoa(f float, fmt byte, prec int) string {
|
||||||
}
|
}
|
||||||
|
|
||||||
func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
||||||
neg := bits>>flt.expbits>>flt.mantbits != 0;
|
neg := bits >> flt.expbits >> flt.mantbits != 0;
|
||||||
exp := int(bits>>flt.mantbits) & (1<<flt.expbits - 1);
|
exp := int(bits >> flt.mantbits)&(1 << flt.expbits - 1);
|
||||||
mant := bits & (uint64(1)<<flt.mantbits - 1);
|
mant := bits&(uint64(1) << flt.mantbits - 1);
|
||||||
|
|
||||||
switch exp {
|
switch exp {
|
||||||
case 1<<flt.expbits - 1:
|
case 1 << flt.expbits - 1:
|
||||||
// Inf, NaN
|
// Inf, NaN
|
||||||
if mant != 0 {
|
if mant != 0 {
|
||||||
return "NaN";
|
return "NaN";
|
||||||
|
|
@ -93,7 +94,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
||||||
|
|
||||||
default:
|
default:
|
||||||
// add implicit top bit
|
// add implicit top bit
|
||||||
mant |= uint64(1)<<flt.mantbits;
|
mant |= uint64(1) << flt.mantbits;
|
||||||
}
|
}
|
||||||
exp += flt.bias;
|
exp += flt.bias;
|
||||||
|
|
||||||
|
|
@ -106,7 +107,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
||||||
// The shift is exp - flt.mantbits because mant is a 1-bit integer
|
// The shift is exp - flt.mantbits because mant is a 1-bit integer
|
||||||
// followed by a flt.mantbits fraction, and we are treating it as
|
// followed by a flt.mantbits fraction, and we are treating it as
|
||||||
// a 1+flt.mantbits-bit integer.
|
// a 1+flt.mantbits-bit integer.
|
||||||
d := newDecimal(mant).Shift(exp - int(flt.mantbits));
|
d := newDecimal(mant).Shift(exp-int(flt.mantbits));
|
||||||
|
|
||||||
// Round appropriately.
|
// Round appropriately.
|
||||||
// Negative precision means "only as much as needed to be exact."
|
// Negative precision means "only as much as needed to be exact."
|
||||||
|
|
@ -127,7 +128,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
||||||
case 'e', 'E':
|
case 'e', 'E':
|
||||||
d.Round(prec+1);
|
d.Round(prec+1);
|
||||||
case 'f':
|
case 'f':
|
||||||
d.Round(d.dp+prec);
|
d.Round(d.dp + prec);
|
||||||
case 'g', 'G':
|
case 'g', 'G':
|
||||||
if prec == 0 {
|
if prec == 0 {
|
||||||
prec = 1;
|
prec = 1;
|
||||||
|
|
@ -151,16 +152,16 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
|
||||||
// if precision was the shortest possible, use precision 6 for this decision.
|
// if precision was the shortest possible, use precision 6 for this decision.
|
||||||
eprec := prec;
|
eprec := prec;
|
||||||
if shortest {
|
if shortest {
|
||||||
eprec = 6
|
eprec = 6;
|
||||||
}
|
}
|
||||||
exp := d.dp - 1;
|
exp := d.dp - 1;
|
||||||
if exp < -4 || exp >= eprec {
|
if exp < -4 || exp >= eprec {
|
||||||
return fmtE(neg, d, prec - 1, fmt + 'e' - 'g');
|
return fmtE(neg, d, prec-1, fmt+'e'-'g');
|
||||||
}
|
}
|
||||||
return fmtF(neg, d, max(prec - d.dp, 0));
|
return fmtF(neg, d, max(prec - d.dp, 0));
|
||||||
}
|
}
|
||||||
|
|
||||||
return "%" + string(fmt);
|
return "%"+string(fmt);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Round d (= mant * 2^exp) to the shortest number of digits
|
// Round d (= mant * 2^exp) to the shortest number of digits
|
||||||
|
|
@ -185,7 +186,7 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
|
||||||
// d = mant << (exp - mantbits)
|
// d = mant << (exp - mantbits)
|
||||||
// Next highest floating point number is mant+1 << exp-mantbits.
|
// Next highest floating point number is mant+1 << exp-mantbits.
|
||||||
// Our upper bound is halfway inbetween, mant*2+1 << exp-mantbits-1.
|
// Our upper bound is halfway inbetween, mant*2+1 << exp-mantbits-1.
|
||||||
upper := newDecimal(mant*2+1).Shift(exp-int(flt.mantbits)-1);
|
upper := newDecimal(mant*2 + 1).Shift(exp-int(flt.mantbits)-1);
|
||||||
|
|
||||||
// d = mant << (exp - mantbits)
|
// d = mant << (exp - mantbits)
|
||||||
// Next lowest floating point number is mant-1 << exp-mantbits,
|
// Next lowest floating point number is mant-1 << exp-mantbits,
|
||||||
|
|
@ -196,14 +197,14 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
|
||||||
minexp := flt.bias + 1; // minimum possible exponent
|
minexp := flt.bias + 1; // minimum possible exponent
|
||||||
var mantlo uint64;
|
var mantlo uint64;
|
||||||
var explo int;
|
var explo int;
|
||||||
if mant > 1<<flt.mantbits || exp == minexp {
|
if mant > 1 << flt.mantbits || exp == minexp {
|
||||||
mantlo = mant - 1;
|
mantlo = mant-1;
|
||||||
explo = exp;
|
explo = exp;
|
||||||
} else {
|
} else {
|
||||||
mantlo = mant*2-1;
|
mantlo = mant*2 - 1;
|
||||||
explo = exp-1;
|
explo = exp-1;
|
||||||
}
|
}
|
||||||
lower := newDecimal(mantlo*2+1).Shift(explo-int(flt.mantbits)-1);
|
lower := newDecimal(mantlo*2 + 1).Shift(explo-int(flt.mantbits)-1);
|
||||||
|
|
||||||
// The upper and lower bounds are possible outputs only if
|
// The upper and lower bounds are possible outputs only if
|
||||||
// the original mantissa is even, so that IEEE round-to-even
|
// the original mantissa is even, so that IEEE round-to-even
|
||||||
|
|
@ -252,8 +253,8 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
|
||||||
|
|
||||||
// %e: -d.ddddde±dd
|
// %e: -d.ddddde±dd
|
||||||
func fmtE(neg bool, d *decimal, prec int, fmt byte) string {
|
func fmtE(neg bool, d *decimal, prec int, fmt byte) string {
|
||||||
buf := make([]byte, 3+max(prec, 0)+30); // "-0." + prec digits + exp
|
buf := make([]byte, 3 + max(prec, 0) + 30); // "-0." + prec digits + exp
|
||||||
w := 0; // write index
|
w := 0; // write index
|
||||||
|
|
||||||
// sign
|
// sign
|
||||||
if neg {
|
if neg {
|
||||||
|
|
@ -322,7 +323,7 @@ func fmtE(neg bool, d *decimal, prec int, fmt byte) string {
|
||||||
|
|
||||||
// %f: -ddddddd.ddddd
|
// %f: -ddddddd.ddddd
|
||||||
func fmtF(neg bool, d *decimal, prec int) string {
|
func fmtF(neg bool, d *decimal, prec int) string {
|
||||||
buf := make([]byte, 1+max(d.dp, 1)+1+max(prec, 0));
|
buf := make([]byte, 1 + max(d.dp, 1) + 1 + max(prec, 0));
|
||||||
w := 0;
|
w := 0;
|
||||||
|
|
||||||
// sign
|
// sign
|
||||||
|
|
@ -352,10 +353,10 @@ func fmtF(neg bool, d *decimal, prec int) string {
|
||||||
buf[w] = '.';
|
buf[w] = '.';
|
||||||
w++;
|
w++;
|
||||||
for i := 0; i < prec; i++ {
|
for i := 0; i < prec; i++ {
|
||||||
if d.dp+i < 0 || d.dp+i >= d.nd {
|
if d.dp + i < 0 || d.dp + i >= d.nd {
|
||||||
buf[w] = '0';
|
buf[w] = '0';
|
||||||
} else {
|
} else {
|
||||||
buf[w] = d.d[d.dp+i];
|
buf[w] = d.d[d.dp + i];
|
||||||
}
|
}
|
||||||
w++;
|
w++;
|
||||||
}
|
}
|
||||||
|
|
@ -379,7 +380,7 @@ func fmtB(neg bool, mant uint64, exp int, flt *floatInfo) string {
|
||||||
n++;
|
n++;
|
||||||
w--;
|
w--;
|
||||||
buf[w] = byte(exp%10 + '0');
|
buf[w] = byte(exp%10 + '0');
|
||||||
exp /= 10
|
exp /= 10;
|
||||||
}
|
}
|
||||||
w--;
|
w--;
|
||||||
buf[w] = esign;
|
buf[w] = esign;
|
||||||
|
|
@ -405,4 +406,3 @@ func max(a, b int) int {
|
||||||
}
|
}
|
||||||
return b;
|
return b;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -13,32 +13,32 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
type Test struct {
|
type Test struct {
|
||||||
in, out, err string
|
in, out, err string;
|
||||||
}
|
}
|
||||||
|
|
||||||
type T struct {
|
type T struct {
|
||||||
item string;
|
item string;
|
||||||
value string;
|
value string;
|
||||||
}
|
}
|
||||||
|
|
||||||
type S struct {
|
type S struct {
|
||||||
header string;
|
header string;
|
||||||
integer int;
|
integer int;
|
||||||
raw string;
|
raw string;
|
||||||
innerT T;
|
innerT T;
|
||||||
innerPointerT *T;
|
innerPointerT *T;
|
||||||
data []T;
|
data []T;
|
||||||
pdata []*T;
|
pdata []*T;
|
||||||
empty []*T;
|
empty []*T;
|
||||||
emptystring string;
|
emptystring string;
|
||||||
null []*T;
|
null []*T;
|
||||||
vec *vector.Vector;
|
vec *vector.Vector;
|
||||||
true bool;
|
true bool;
|
||||||
false bool;
|
false bool;
|
||||||
}
|
}
|
||||||
|
|
||||||
var t1 = T{ "ItemNumber1", "ValueNumber1" }
|
var t1 = T{"ItemNumber1", "ValueNumber1"}
|
||||||
var t2 = T{ "ItemNumber2", "ValueNumber2" }
|
var t2 = T{"ItemNumber2", "ValueNumber2"}
|
||||||
|
|
||||||
func uppercase(v interface{}) string {
|
func uppercase(v interface{}) string {
|
||||||
s := v.(string);
|
s := v.(string);
|
||||||
|
|
@ -46,7 +46,7 @@ func uppercase(v interface{}) string {
|
||||||
for i := 0; i < len(s); i++ {
|
for i := 0; i < len(s); i++ {
|
||||||
c := s[i];
|
c := s[i];
|
||||||
if 'a' <= c && c <= 'z' {
|
if 'a' <= c && c <= 'z' {
|
||||||
c = c + 'A' - 'a'
|
c = c+'A'-'a';
|
||||||
}
|
}
|
||||||
t += string(c);
|
t += string(c);
|
||||||
}
|
}
|
||||||
|
|
@ -55,36 +55,36 @@ func uppercase(v interface{}) string {
|
||||||
|
|
||||||
func plus1(v interface{}) string {
|
func plus1(v interface{}) string {
|
||||||
i := v.(int);
|
i := v.(int);
|
||||||
return fmt.Sprint(i + 1);
|
return fmt.Sprint(i+1);
|
||||||
}
|
}
|
||||||
|
|
||||||
func writer(f func(interface{}) string) (func(io.Writer, interface{}, string)) {
|
func writer(f func(interface{}) string) (func(io.Writer, interface{}, string)) {
|
||||||
return func(w io.Writer, v interface{}, format string) {
|
return func(w io.Writer, v interface{}, format string) {
|
||||||
io.WriteString(w, f(v));
|
io.WriteString(w, f(v));
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
var formatters = FormatterMap {
|
var formatters = FormatterMap{
|
||||||
"uppercase" : writer(uppercase),
|
"uppercase": writer(uppercase),
|
||||||
"+1" : writer(plus1),
|
"+1": writer(plus1),
|
||||||
}
|
}
|
||||||
|
|
||||||
var tests = []*Test {
|
var tests = []*Test{
|
||||||
// Simple
|
// Simple
|
||||||
&Test{ "", "", "" },
|
&Test{"", "", ""},
|
||||||
&Test{ "abc\ndef\n", "abc\ndef\n", "" },
|
&Test{"abc\ndef\n", "abc\ndef\n", ""},
|
||||||
&Test{ " {.meta-left} \n", "{", "" },
|
&Test{" {.meta-left} \n", "{", ""},
|
||||||
&Test{ " {.meta-right} \n", "}", "" },
|
&Test{" {.meta-right} \n", "}", ""},
|
||||||
&Test{ " {.space} \n", " ", "" },
|
&Test{" {.space} \n", " ", ""},
|
||||||
&Test{ " {.tab} \n", "\t", "" },
|
&Test{" {.tab} \n", "\t", ""},
|
||||||
&Test{ " {#comment} \n", "", "" },
|
&Test{" {#comment} \n", "", ""},
|
||||||
|
|
||||||
// Variables at top level
|
// Variables at top level
|
||||||
&Test{
|
&Test{
|
||||||
in: "{header}={integer}\n",
|
in: "{header}={integer}\n",
|
||||||
|
|
||||||
out: "Header=77\n"
|
out: "Header=77\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
// Section
|
// Section
|
||||||
|
|
@ -93,21 +93,21 @@ var tests = []*Test {
|
||||||
"some text for the section\n"
|
"some text for the section\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "some text for the section\n"
|
out: "some text for the section\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section data }\n"
|
in: "{.section data }\n"
|
||||||
"{header}={integer}\n"
|
"{header}={integer}\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "Header=77\n"
|
out: "Header=77\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section pdata }\n"
|
in: "{.section pdata }\n"
|
||||||
"{header}={integer}\n"
|
"{header}={integer}\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "Header=77\n"
|
out: "Header=77\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section pdata }\n"
|
in: "{.section pdata }\n"
|
||||||
|
|
@ -116,7 +116,7 @@ var tests = []*Test {
|
||||||
"data not present\n"
|
"data not present\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "data present\n"
|
out: "data present\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section empty }\n"
|
in: "{.section empty }\n"
|
||||||
|
|
@ -125,7 +125,7 @@ var tests = []*Test {
|
||||||
"data not present\n"
|
"data not present\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "data not present\n"
|
out: "data not present\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section null }\n"
|
in: "{.section null }\n"
|
||||||
|
|
@ -134,7 +134,7 @@ var tests = []*Test {
|
||||||
"data not present\n"
|
"data not present\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "data not present\n"
|
out: "data not present\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section pdata }\n"
|
in: "{.section pdata }\n"
|
||||||
|
|
@ -145,12 +145,12 @@ var tests = []*Test {
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "Header=77\n"
|
out: "Header=77\n"
|
||||||
"Header=77\n"
|
"Header=77\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section data}{.end} {header}\n",
|
in: "{.section data}{.end} {header}\n",
|
||||||
|
|
||||||
out: " Header\n"
|
out: " Header\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
// Repeated
|
// Repeated
|
||||||
|
|
@ -162,7 +162,7 @@ var tests = []*Test {
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "ItemNumber1=ValueNumber1\n"
|
out: "ItemNumber1=ValueNumber1\n"
|
||||||
"ItemNumber2=ValueNumber2\n"
|
"ItemNumber2=ValueNumber2\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section pdata }\n"
|
in: "{.section pdata }\n"
|
||||||
|
|
@ -174,7 +174,7 @@ var tests = []*Test {
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "ItemNumber1=ValueNumber1\n"
|
out: "ItemNumber1=ValueNumber1\n"
|
||||||
"ItemNumber2=ValueNumber2\n"
|
"ItemNumber2=ValueNumber2\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section @ }\n"
|
in: "{.section @ }\n"
|
||||||
|
|
@ -185,7 +185,7 @@ var tests = []*Test {
|
||||||
"{.end}\n"
|
"{.end}\n"
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "this should appear: empty field\n"
|
out: "this should appear: empty field\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.repeated section pdata }\n"
|
in: "{.repeated section pdata }\n"
|
||||||
|
|
@ -196,7 +196,7 @@ var tests = []*Test {
|
||||||
|
|
||||||
out: "ItemNumber1\n"
|
out: "ItemNumber1\n"
|
||||||
"is\nover\nmultiple\nlines\n"
|
"is\nover\nmultiple\nlines\n"
|
||||||
"ItemNumber2\n"
|
"ItemNumber2\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section pdata }\n"
|
in: "{.section pdata }\n"
|
||||||
|
|
@ -210,7 +210,7 @@ var tests = []*Test {
|
||||||
|
|
||||||
out: "ItemNumber1=ValueNumber1\n"
|
out: "ItemNumber1=ValueNumber1\n"
|
||||||
"DIVIDER\n"
|
"DIVIDER\n"
|
||||||
"ItemNumber2=ValueNumber2\n"
|
"ItemNumber2=ValueNumber2\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.repeated section vec }\n"
|
in: "{.repeated section vec }\n"
|
||||||
|
|
@ -218,7 +218,7 @@ var tests = []*Test {
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "elt1\n"
|
out: "elt1\n"
|
||||||
"elt2\n"
|
"elt2\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.repeated section integer}{.end}",
|
in: "{.repeated section integer}{.end}",
|
||||||
|
|
@ -232,14 +232,14 @@ var tests = []*Test {
|
||||||
"{innerT.item}={innerT.value}\n"
|
"{innerT.item}={innerT.value}\n"
|
||||||
"{.end}",
|
"{.end}",
|
||||||
|
|
||||||
out: "ItemNumber1=ValueNumber1\n"
|
out: "ItemNumber1=ValueNumber1\n",
|
||||||
},
|
},
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section @ }\n"
|
in: "{.section @ }\n"
|
||||||
"{innerT.item}={.section innerT}{.section value}{@}{.end}{.end}\n"
|
"{innerT.item}={.section innerT}{.section value}{@}{.end}{.end}\n"
|
||||||
"{.end}",
|
"{.end}",
|
||||||
|
|
||||||
out: "ItemNumber1=ValueNumber1\n"
|
out: "ItemNumber1=ValueNumber1\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -251,7 +251,7 @@ var tests = []*Test {
|
||||||
"{.end}\n",
|
"{.end}\n",
|
||||||
|
|
||||||
out: "HEADER=78\n"
|
out: "HEADER=78\n"
|
||||||
"Header=77\n"
|
"Header=77\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
&Test{
|
&Test{
|
||||||
|
|
@ -259,21 +259,21 @@ var tests = []*Test {
|
||||||
"{raw|html}\n",
|
"{raw|html}\n",
|
||||||
|
|
||||||
out: "&<>!@ #$%^\n"
|
out: "&<>!@ #$%^\n"
|
||||||
"&<>!@ #$%^\n"
|
"&<>!@ #$%^\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
&Test{
|
&Test{
|
||||||
in: "{.section emptystring}emptystring{.end}\n"
|
in: "{.section emptystring}emptystring{.end}\n"
|
||||||
"{.section header}header{.end}\n",
|
"{.section header}header{.end}\n",
|
||||||
|
|
||||||
out: "\nheader\n"
|
out: "\nheader\n",
|
||||||
},
|
},
|
||||||
|
|
||||||
&Test {
|
&Test{
|
||||||
in: "{.section true}1{.or}2{.end}\n"
|
in: "{.section true}1{.or}2{.end}\n"
|
||||||
"{.section false}3{.or}4{.end}\n",
|
"{.section false}3{.or}4{.end}\n",
|
||||||
|
|
||||||
out: "1\n4\n"
|
out: "1\n4\n",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -284,9 +284,9 @@ func TestAll(t *testing.T) {
|
||||||
s.integer = 77;
|
s.integer = 77;
|
||||||
s.raw = "&<>!@ #$%^";
|
s.raw = "&<>!@ #$%^";
|
||||||
s.innerT = t1;
|
s.innerT = t1;
|
||||||
s.data = []T{ t1, t2 };
|
s.data = []T{t1, t2};
|
||||||
s.pdata = []*T{ &t1, &t2 };
|
s.pdata = []*T{&t1, &t2};
|
||||||
s.empty = []*T{ };
|
s.empty = []*T{};
|
||||||
s.null = nil;
|
s.null = nil;
|
||||||
s.vec = vector.New(0);
|
s.vec = vector.New(0);
|
||||||
s.vec.Push("elt1");
|
s.vec.Push("elt1");
|
||||||
|
|
@ -321,28 +321,28 @@ func TestAll(t *testing.T) {
|
||||||
func TestStringDriverType(t *testing.T) {
|
func TestStringDriverType(t *testing.T) {
|
||||||
tmpl, err := Parse("template: {@}", nil);
|
tmpl, err := Parse("template: {@}", nil);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("unexpected parse error:", err)
|
t.Error("unexpected parse error:", err);
|
||||||
}
|
}
|
||||||
var b bytes.Buffer;
|
var b bytes.Buffer;
|
||||||
err = tmpl.Execute("hello", &b);
|
err = tmpl.Execute("hello", &b);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("unexpected execute error:", err)
|
t.Error("unexpected execute error:", err);
|
||||||
}
|
}
|
||||||
s := b.String();
|
s := b.String();
|
||||||
if s != "template: hello" {
|
if s != "template: hello" {
|
||||||
t.Errorf("failed passing string as data: expected %q got %q", "template: hello", s)
|
t.Errorf("failed passing string as data: expected %q got %q", "template: hello", s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTwice(t *testing.T) {
|
func TestTwice(t *testing.T) {
|
||||||
tmpl, err := Parse("template: {@}", nil);
|
tmpl, err := Parse("template: {@}", nil);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("unexpected parse error:", err)
|
t.Error("unexpected parse error:", err);
|
||||||
}
|
}
|
||||||
var b bytes.Buffer;
|
var b bytes.Buffer;
|
||||||
err = tmpl.Execute("hello", &b);
|
err = tmpl.Execute("hello", &b);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("unexpected parse error:", err)
|
t.Error("unexpected parse error:", err);
|
||||||
}
|
}
|
||||||
s := b.String();
|
s := b.String();
|
||||||
text := "template: hello";
|
text := "template: hello";
|
||||||
|
|
@ -351,7 +351,7 @@ func TestTwice(t *testing.T) {
|
||||||
}
|
}
|
||||||
err = tmpl.Execute("hello", &b);
|
err = tmpl.Execute("hello", &b);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("unexpected parse error:", err)
|
t.Error("unexpected parse error:", err);
|
||||||
}
|
}
|
||||||
s = b.String();
|
s = b.String();
|
||||||
text += text;
|
text += text;
|
||||||
|
|
@ -377,9 +377,9 @@ func TestCustomDelims(t *testing.T) {
|
||||||
err := tmpl.Parse(text);
|
err := tmpl.Parse(text);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if i == 0 || j == 0 { // expected
|
if i == 0 || j == 0 { // expected
|
||||||
continue
|
continue;
|
||||||
}
|
}
|
||||||
t.Error("unexpected parse error:", err)
|
t.Error("unexpected parse error:", err);
|
||||||
} else if i == 0 || j == 0 {
|
} else if i == 0 || j == 0 {
|
||||||
t.Errorf("expected parse error for empty delimiter: %d %d %q %q", i, j, ldelim, rdelim);
|
t.Errorf("expected parse error for empty delimiter: %d %d %q %q", i, j, ldelim, rdelim);
|
||||||
continue;
|
continue;
|
||||||
|
|
@ -388,7 +388,7 @@ func TestCustomDelims(t *testing.T) {
|
||||||
err = tmpl.Execute("hello", &b);
|
err = tmpl.Execute("hello", &b);
|
||||||
s := b.String();
|
s := b.String();
|
||||||
if s != "template: hello" + ldelim + rdelim {
|
if s != "template: hello" + ldelim + rdelim {
|
||||||
t.Errorf("failed delim check(%q %q) %q got %q", ldelim, rdelim, text, s)
|
t.Errorf("failed delim check(%q %q) %q got %q", ldelim, rdelim, text, s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -408,7 +408,7 @@ func TestVarIndirection(t *testing.T) {
|
||||||
}
|
}
|
||||||
err = tmpl.Execute(s, &buf);
|
err = tmpl.Execute(s, &buf);
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal("unexpected execute error:", err)
|
t.Fatal("unexpected execute error:", err);
|
||||||
}
|
}
|
||||||
expect := fmt.Sprintf("%v", &t1); // output should be hex address of t1
|
expect := fmt.Sprintf("%v", &t1); // output should be hex address of t1
|
||||||
if buf.String() != expect {
|
if buf.String() != expect {
|
||||||
|
|
|
||||||
|
|
@ -70,7 +70,7 @@ func (r *dataErrReader) Read(p []byte) (n int, err os.Error) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
n = bytes.Copy(p, r.unread);
|
n = bytes.Copy(p, r.unread);
|
||||||
r.unread = r.unread[n : len(r.unread)];
|
r.unread = r.unread[n:len(r.unread)];
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -29,28 +29,28 @@ import (
|
||||||
"utf8";
|
"utf8";
|
||||||
)
|
)
|
||||||
|
|
||||||
var debug = false;
|
var debug = false
|
||||||
|
|
||||||
// Error codes returned by failures to parse an expression.
|
// Error codes returned by failures to parse an expression.
|
||||||
var (
|
var (
|
||||||
ErrInternal = "internal error";
|
ErrInternal = "internal error";
|
||||||
ErrUnmatchedLpar = "unmatched ''";
|
ErrUnmatchedLpar = "unmatched ''";
|
||||||
ErrUnmatchedRpar = "unmatched ''";
|
ErrUnmatchedRpar = "unmatched ''";
|
||||||
ErrUnmatchedLbkt = "unmatched '['";
|
ErrUnmatchedLbkt = "unmatched '['";
|
||||||
ErrUnmatchedRbkt = "unmatched ']'";
|
ErrUnmatchedRbkt = "unmatched ']'";
|
||||||
ErrBadRange = "bad range in character class";
|
ErrBadRange = "bad range in character class";
|
||||||
ErrExtraneousBackslash = "extraneous backslash";
|
ErrExtraneousBackslash = "extraneous backslash";
|
||||||
ErrBadClosure = "repeated closure **, ++, etc.";
|
ErrBadClosure = "repeated closure **, ++, etc.";
|
||||||
ErrBareClosure = "closure applies to nothing";
|
ErrBareClosure = "closure applies to nothing";
|
||||||
ErrBadBackslash = "illegal backslash escape";
|
ErrBadBackslash = "illegal backslash escape";
|
||||||
)
|
)
|
||||||
|
|
||||||
// An instruction executed by the NFA
|
// An instruction executed by the NFA
|
||||||
type instr interface {
|
type instr interface {
|
||||||
kind() int; // the type of this instruction: _CHAR, _ANY, etc.
|
kind() int; // the type of this instruction: _CHAR, _ANY, etc.
|
||||||
next() instr; // the instruction to execute after this one
|
next() instr; // the instruction to execute after this one
|
||||||
setNext(i instr);
|
setNext(i instr);
|
||||||
index() int;
|
index() int;
|
||||||
setIndex(i int);
|
setIndex(i int);
|
||||||
print();
|
print();
|
||||||
}
|
}
|
||||||
|
|
@ -61,69 +61,93 @@ type common struct {
|
||||||
_index int;
|
_index int;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *common) next() instr { return c._next }
|
func (c *common) next() instr {
|
||||||
func (c *common) setNext(i instr) { c._next = i }
|
return c._next;
|
||||||
func (c *common) index() int { return c._index }
|
}
|
||||||
func (c *common) setIndex(i int) { c._index = i }
|
func (c *common) setNext(i instr) {
|
||||||
|
c._next = i;
|
||||||
|
}
|
||||||
|
func (c *common) index() int {
|
||||||
|
return c._index;
|
||||||
|
}
|
||||||
|
func (c *common) setIndex(i int) {
|
||||||
|
c._index = i;
|
||||||
|
}
|
||||||
|
|
||||||
// The representation of a compiled regular expression.
|
// The representation of a compiled regular expression.
|
||||||
// The public interface is entirely through methods.
|
// The public interface is entirely through methods.
|
||||||
type Regexp struct {
|
type Regexp struct {
|
||||||
expr string; // the original expression
|
expr string; // the original expression
|
||||||
ch chan<- *Regexp; // reply channel when we're done
|
ch chan<- *Regexp; // reply channel when we're done
|
||||||
error string; // compile- or run-time error; nil if OK
|
error string; // compile- or run-time error; nil if OK
|
||||||
inst []instr;
|
inst []instr;
|
||||||
start instr;
|
start instr;
|
||||||
nbra int; // number of brackets in expression, for subexpressions
|
nbra int; // number of brackets in expression, for subexpressions
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
_START // beginning of program
|
_START = // beginning of program
|
||||||
= iota;
|
iota;
|
||||||
_END; // end of program: success
|
_END; // end of program: success
|
||||||
_BOT; // '^' beginning of text
|
_BOT; // '^' beginning of text
|
||||||
_EOT; // '$' end of text
|
_EOT; // '$' end of text
|
||||||
_CHAR; // 'a' regular character
|
_CHAR; // 'a' regular character
|
||||||
_CHARCLASS; // [a-z] character class
|
_CHARCLASS; // [a-z] character class
|
||||||
_ANY; // '.' any character including newline
|
_ANY; // '.' any character including newline
|
||||||
_NOTNL; // [^\n] special case: any character but newline
|
_NOTNL; // [^\n] special case: any character but newline
|
||||||
_BRA; // '(' parenthesized expression
|
_BRA; // '(' parenthesized expression
|
||||||
_EBRA; // ')'; end of '(' parenthesized expression
|
_EBRA; // ')'; end of '(' parenthesized expression
|
||||||
_ALT; // '|' alternation
|
_ALT; // '|' alternation
|
||||||
_NOP; // do nothing; makes it easy to link without patching
|
_NOP; // do nothing; makes it easy to link without patching
|
||||||
)
|
)
|
||||||
|
|
||||||
// --- START start of program
|
// --- START start of program
|
||||||
type _Start struct {
|
type _Start struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (start *_Start) kind() int { return _START }
|
func (start *_Start) kind() int {
|
||||||
func (start *_Start) print() { print("start") }
|
return _START;
|
||||||
|
}
|
||||||
|
func (start *_Start) print() {
|
||||||
|
print("start");
|
||||||
|
}
|
||||||
|
|
||||||
// --- END end of program
|
// --- END end of program
|
||||||
type _End struct {
|
type _End struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (end *_End) kind() int { return _END }
|
func (end *_End) kind() int {
|
||||||
func (end *_End) print() { print("end") }
|
return _END;
|
||||||
|
}
|
||||||
|
func (end *_End) print() {
|
||||||
|
print("end");
|
||||||
|
}
|
||||||
|
|
||||||
// --- BOT beginning of text
|
// --- BOT beginning of text
|
||||||
type _Bot struct {
|
type _Bot struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (bot *_Bot) kind() int { return _BOT }
|
func (bot *_Bot) kind() int {
|
||||||
func (bot *_Bot) print() { print("bot") }
|
return _BOT;
|
||||||
|
}
|
||||||
|
func (bot *_Bot) print() {
|
||||||
|
print("bot");
|
||||||
|
}
|
||||||
|
|
||||||
// --- EOT end of text
|
// --- EOT end of text
|
||||||
type _Eot struct {
|
type _Eot struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (eot *_Eot) kind() int { return _EOT }
|
func (eot *_Eot) kind() int {
|
||||||
func (eot *_Eot) print() { print("eot") }
|
return _EOT;
|
||||||
|
}
|
||||||
|
func (eot *_Eot) print() {
|
||||||
|
print("eot");
|
||||||
|
}
|
||||||
|
|
||||||
// --- CHAR a regular character
|
// --- CHAR a regular character
|
||||||
type _Char struct {
|
type _Char struct {
|
||||||
|
|
@ -131,8 +155,12 @@ type _Char struct {
|
||||||
char int;
|
char int;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (char *_Char) kind() int { return _CHAR }
|
func (char *_Char) kind() int {
|
||||||
func (char *_Char) print() { print("char ", string(char.char)) }
|
return _CHAR;
|
||||||
|
}
|
||||||
|
func (char *_Char) print() {
|
||||||
|
print("char ", string(char.char));
|
||||||
|
}
|
||||||
|
|
||||||
func newChar(char int) *_Char {
|
func newChar(char int) *_Char {
|
||||||
c := new(_Char);
|
c := new(_Char);
|
||||||
|
|
@ -150,7 +178,9 @@ type _CharClass struct {
|
||||||
ranges []int;
|
ranges []int;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cclass *_CharClass) kind() int { return _CHARCLASS }
|
func (cclass *_CharClass) kind() int {
|
||||||
|
return _CHARCLASS;
|
||||||
|
}
|
||||||
|
|
||||||
func (cclass *_CharClass) print() {
|
func (cclass *_CharClass) print() {
|
||||||
print("charclass");
|
print("charclass");
|
||||||
|
|
@ -174,11 +204,11 @@ func (cclass *_CharClass) addRange(a, b int) {
|
||||||
if n >= cap(cclass.ranges) {
|
if n >= cap(cclass.ranges) {
|
||||||
nr := make([]int, n, 2*n);
|
nr := make([]int, n, 2*n);
|
||||||
for i, j := range nr {
|
for i, j := range nr {
|
||||||
nr[i] = j
|
nr[i] = j;
|
||||||
}
|
}
|
||||||
cclass.ranges = nr;
|
cclass.ranges = nr;
|
||||||
}
|
}
|
||||||
cclass.ranges = cclass.ranges[0:n+2];
|
cclass.ranges = cclass.ranges[0 : n+2];
|
||||||
cclass.ranges[n] = a;
|
cclass.ranges[n] = a;
|
||||||
n++;
|
n++;
|
||||||
cclass.ranges[n] = b;
|
cclass.ranges[n] = b;
|
||||||
|
|
@ -190,10 +220,10 @@ func (cclass *_CharClass) matches(c int) bool {
|
||||||
min := cclass.ranges[i];
|
min := cclass.ranges[i];
|
||||||
max := cclass.ranges[i+1];
|
max := cclass.ranges[i+1];
|
||||||
if min <= c && c <= max {
|
if min <= c && c <= max {
|
||||||
return !cclass.negate
|
return !cclass.negate;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return cclass.negate
|
return cclass.negate;
|
||||||
}
|
}
|
||||||
|
|
||||||
func newCharClass() *_CharClass {
|
func newCharClass() *_CharClass {
|
||||||
|
|
@ -204,19 +234,27 @@ func newCharClass() *_CharClass {
|
||||||
|
|
||||||
// --- ANY any character
|
// --- ANY any character
|
||||||
type _Any struct {
|
type _Any struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (any *_Any) kind() int { return _ANY }
|
func (any *_Any) kind() int {
|
||||||
func (any *_Any) print() { print("any") }
|
return _ANY;
|
||||||
|
}
|
||||||
|
func (any *_Any) print() {
|
||||||
|
print("any");
|
||||||
|
}
|
||||||
|
|
||||||
// --- NOTNL any character but newline
|
// --- NOTNL any character but newline
|
||||||
type _NotNl struct {
|
type _NotNl struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (notnl *_NotNl) kind() int { return _NOTNL }
|
func (notnl *_NotNl) kind() int {
|
||||||
func (notnl *_NotNl) print() { print("notnl") }
|
return _NOTNL;
|
||||||
|
}
|
||||||
|
func (notnl *_NotNl) print() {
|
||||||
|
print("notnl");
|
||||||
|
}
|
||||||
|
|
||||||
// --- BRA parenthesized expression
|
// --- BRA parenthesized expression
|
||||||
type _Bra struct {
|
type _Bra struct {
|
||||||
|
|
@ -224,8 +262,12 @@ type _Bra struct {
|
||||||
n int; // subexpression number
|
n int; // subexpression number
|
||||||
}
|
}
|
||||||
|
|
||||||
func (bra *_Bra) kind() int { return _BRA }
|
func (bra *_Bra) kind() int {
|
||||||
func (bra *_Bra) print() { print("bra", bra.n); }
|
return _BRA;
|
||||||
|
}
|
||||||
|
func (bra *_Bra) print() {
|
||||||
|
print("bra", bra.n);
|
||||||
|
}
|
||||||
|
|
||||||
// --- EBRA end of parenthesized expression
|
// --- EBRA end of parenthesized expression
|
||||||
type _Ebra struct {
|
type _Ebra struct {
|
||||||
|
|
@ -233,8 +275,12 @@ type _Ebra struct {
|
||||||
n int; // subexpression number
|
n int; // subexpression number
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ebra *_Ebra) kind() int { return _EBRA }
|
func (ebra *_Ebra) kind() int {
|
||||||
func (ebra *_Ebra) print() { print("ebra ", ebra.n); }
|
return _EBRA;
|
||||||
|
}
|
||||||
|
func (ebra *_Ebra) print() {
|
||||||
|
print("ebra ", ebra.n);
|
||||||
|
}
|
||||||
|
|
||||||
// --- ALT alternation
|
// --- ALT alternation
|
||||||
type _Alt struct {
|
type _Alt struct {
|
||||||
|
|
@ -242,16 +288,24 @@ type _Alt struct {
|
||||||
left instr; // other branch
|
left instr; // other branch
|
||||||
}
|
}
|
||||||
|
|
||||||
func (alt *_Alt) kind() int { return _ALT }
|
func (alt *_Alt) kind() int {
|
||||||
func (alt *_Alt) print() { print("alt(", alt.left.index(), ")"); }
|
return _ALT;
|
||||||
|
}
|
||||||
|
func (alt *_Alt) print() {
|
||||||
|
print("alt(", alt.left.index(), ")");
|
||||||
|
}
|
||||||
|
|
||||||
// --- NOP no operation
|
// --- NOP no operation
|
||||||
type _Nop struct {
|
type _Nop struct {
|
||||||
common
|
common;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (nop *_Nop) kind() int { return _NOP }
|
func (nop *_Nop) kind() int {
|
||||||
func (nop *_Nop) print() { print("nop") }
|
return _NOP;
|
||||||
|
}
|
||||||
|
func (nop *_Nop) print() {
|
||||||
|
print("nop");
|
||||||
|
}
|
||||||
|
|
||||||
// report error and exit compiling/executing goroutine
|
// report error and exit compiling/executing goroutine
|
||||||
func (re *Regexp) setError(err string) {
|
func (re *Regexp) setError(err string) {
|
||||||
|
|
@ -266,11 +320,11 @@ func (re *Regexp) add(i instr) instr {
|
||||||
if n >= cap(re.inst) {
|
if n >= cap(re.inst) {
|
||||||
ni := make([]instr, n, 2*n);
|
ni := make([]instr, n, 2*n);
|
||||||
for i, j := range re.inst {
|
for i, j := range re.inst {
|
||||||
ni[i] = j
|
ni[i] = j;
|
||||||
}
|
}
|
||||||
re.inst = ni;
|
re.inst = ni;
|
||||||
}
|
}
|
||||||
re.inst = re.inst[0:n+1];
|
re.inst = re.inst[0 : n+1];
|
||||||
re.inst[n] = i;
|
re.inst[n] = i;
|
||||||
return i;
|
return i;
|
||||||
}
|
}
|
||||||
|
|
@ -290,9 +344,9 @@ func (p *parser) c() int {
|
||||||
|
|
||||||
func (p *parser) nextc() int {
|
func (p *parser) nextc() int {
|
||||||
if p.pos >= len(p.re.expr) {
|
if p.pos >= len(p.re.expr) {
|
||||||
p.ch = endOfFile
|
p.ch = endOfFile;
|
||||||
} else {
|
} else {
|
||||||
c, w := utf8.DecodeRuneInString(p.re.expr[p.pos:len(p.re.expr)]);
|
c, w := utf8.DecodeRuneInString(p.re.expr[p.pos : len(p.re.expr)]);
|
||||||
p.ch = c;
|
p.ch = c;
|
||||||
p.pos += w;
|
p.pos += w;
|
||||||
}
|
}
|
||||||
|
|
@ -312,20 +366,20 @@ func special(c int) bool {
|
||||||
s := `\.+*?()|[]^$`;
|
s := `\.+*?()|[]^$`;
|
||||||
for i := 0; i < len(s); i++ {
|
for i := 0; i < len(s); i++ {
|
||||||
if c == int(s[i]) {
|
if c == int(s[i]) {
|
||||||
return true
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
func specialcclass(c int) bool {
|
func specialcclass(c int) bool {
|
||||||
s := `\-[]`;
|
s := `\-[]`;
|
||||||
for i := 0; i < len(s); i++ {
|
for i := 0; i < len(s); i++ {
|
||||||
if c == int(s[i]) {
|
if c == int(s[i]) {
|
||||||
return true
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) charClass() instr {
|
func (p *parser) charClass() instr {
|
||||||
|
|
@ -360,7 +414,7 @@ func (p *parser) charClass() instr {
|
||||||
case c == 'n':
|
case c == 'n':
|
||||||
c = '\n';
|
c = '\n';
|
||||||
case specialcclass(c):
|
case specialcclass(c):
|
||||||
// c is as delivered
|
// c is as delivered
|
||||||
default:
|
default:
|
||||||
p.re.setError(ErrBadBackslash);
|
p.re.setError(ErrBadBackslash);
|
||||||
}
|
}
|
||||||
|
|
@ -383,7 +437,7 @@ func (p *parser) charClass() instr {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return iNULL
|
return iNULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) term() (start, end instr) {
|
func (p *parser) term() (start, end instr) {
|
||||||
|
|
@ -438,9 +492,9 @@ func (p *parser) term() (start, end instr) {
|
||||||
ebra.n = nbra;
|
ebra.n = nbra;
|
||||||
if start == iNULL {
|
if start == iNULL {
|
||||||
if end == iNULL {
|
if end == iNULL {
|
||||||
p.re.setError(ErrInternal)
|
p.re.setError(ErrInternal);
|
||||||
}
|
}
|
||||||
start = ebra
|
start = ebra;
|
||||||
} else {
|
} else {
|
||||||
end.setNext(ebra);
|
end.setNext(ebra);
|
||||||
}
|
}
|
||||||
|
|
@ -454,7 +508,7 @@ func (p *parser) term() (start, end instr) {
|
||||||
case c == 'n':
|
case c == 'n':
|
||||||
c = '\n';
|
c = '\n';
|
||||||
case special(c):
|
case special(c):
|
||||||
// c is as delivered
|
// c is as delivered
|
||||||
default:
|
default:
|
||||||
p.re.setError(ErrBadBackslash);
|
p.re.setError(ErrBadBackslash);
|
||||||
}
|
}
|
||||||
|
|
@ -463,7 +517,7 @@ func (p *parser) term() (start, end instr) {
|
||||||
p.nextc();
|
p.nextc();
|
||||||
start = newChar(c);
|
start = newChar(c);
|
||||||
p.re.add(start);
|
p.re.add(start);
|
||||||
return start, start
|
return start, start;
|
||||||
}
|
}
|
||||||
panic("unreachable");
|
panic("unreachable");
|
||||||
}
|
}
|
||||||
|
|
@ -471,7 +525,7 @@ func (p *parser) term() (start, end instr) {
|
||||||
func (p *parser) closure() (start, end instr) {
|
func (p *parser) closure() (start, end instr) {
|
||||||
start, end = p.term();
|
start, end = p.term();
|
||||||
if start == iNULL {
|
if start == iNULL {
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
switch p.c() {
|
switch p.c() {
|
||||||
case '*':
|
case '*':
|
||||||
|
|
@ -480,7 +534,7 @@ func (p *parser) closure() (start, end instr) {
|
||||||
p.re.add(alt);
|
p.re.add(alt);
|
||||||
end.setNext(alt); // after end, do alt
|
end.setNext(alt); // after end, do alt
|
||||||
alt.left = start; // alternate brach: return to start
|
alt.left = start; // alternate brach: return to start
|
||||||
start = alt; // alt becomes new (start, end)
|
start = alt; // alt becomes new (start, end)
|
||||||
end = alt;
|
end = alt;
|
||||||
case '+':
|
case '+':
|
||||||
// (start,end)+:
|
// (start,end)+:
|
||||||
|
|
@ -488,7 +542,7 @@ func (p *parser) closure() (start, end instr) {
|
||||||
p.re.add(alt);
|
p.re.add(alt);
|
||||||
end.setNext(alt); // after end, do alt
|
end.setNext(alt); // after end, do alt
|
||||||
alt.left = start; // alternate brach: return to start
|
alt.left = start; // alternate brach: return to start
|
||||||
end = alt; // start is unchanged; end is alt
|
end = alt; // start is unchanged; end is alt
|
||||||
case '?':
|
case '?':
|
||||||
// (start,end)?:
|
// (start,end)?:
|
||||||
alt := new(_Alt);
|
alt := new(_Alt);
|
||||||
|
|
@ -498,16 +552,16 @@ func (p *parser) closure() (start, end instr) {
|
||||||
alt.left = start; // alternate branch is start
|
alt.left = start; // alternate branch is start
|
||||||
alt.setNext(nop); // follow on to nop
|
alt.setNext(nop); // follow on to nop
|
||||||
end.setNext(nop); // after end, go to nop
|
end.setNext(nop); // after end, go to nop
|
||||||
start = alt; // start is now alt
|
start = alt; // start is now alt
|
||||||
end = nop; // end is nop pointed to by both branches
|
end = nop; // end is nop pointed to by both branches
|
||||||
default:
|
default:
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
switch p.nextc() {
|
switch p.nextc() {
|
||||||
case '*', '+', '?':
|
case '*', '+', '?':
|
||||||
p.re.setError(ErrBadClosure);
|
p.re.setError(ErrBadClosure);
|
||||||
}
|
}
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) concatenation() (start, end instr) {
|
func (p *parser) concatenation() (start, end instr) {
|
||||||
|
|
@ -556,16 +610,16 @@ func (p *parser) regexp() (start, end instr) {
|
||||||
|
|
||||||
func unNop(i instr) instr {
|
func unNop(i instr) instr {
|
||||||
for i.kind() == _NOP {
|
for i.kind() == _NOP {
|
||||||
i = i.next()
|
i = i.next();
|
||||||
}
|
}
|
||||||
return i
|
return i;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (re *Regexp) eliminateNops() {
|
func (re *Regexp) eliminateNops() {
|
||||||
for i := 0; i < len(re.inst); i++ {
|
for i := 0; i < len(re.inst); i++ {
|
||||||
inst := re.inst[i];
|
inst := re.inst[i];
|
||||||
if inst.kind() == _END {
|
if inst.kind() == _END {
|
||||||
continue
|
continue;
|
||||||
}
|
}
|
||||||
inst.setNext(unNop(inst.next()));
|
inst.setNext(unNop(inst.next()));
|
||||||
if inst.kind() == _ALT {
|
if inst.kind() == _ALT {
|
||||||
|
|
@ -581,7 +635,7 @@ func (re *Regexp) dump() {
|
||||||
print(inst.index(), ": ");
|
print(inst.index(), ": ");
|
||||||
inst.print();
|
inst.print();
|
||||||
if inst.kind() != _END {
|
if inst.kind() != _END {
|
||||||
print(" -> ", inst.next().index())
|
print(" -> ", inst.next().index());
|
||||||
}
|
}
|
||||||
print("\n");
|
print("\n");
|
||||||
}
|
}
|
||||||
|
|
@ -626,7 +680,7 @@ func CompileRegexp(str string) (regexp *Regexp, error string) {
|
||||||
ch := make(chan *Regexp);
|
ch := make(chan *Regexp);
|
||||||
go compiler(str, ch);
|
go compiler(str, ch);
|
||||||
re := <-ch;
|
re := <-ch;
|
||||||
return re, re.error
|
return re, re.error;
|
||||||
}
|
}
|
||||||
|
|
||||||
type state struct {
|
type state struct {
|
||||||
|
|
@ -643,10 +697,10 @@ func addState(s []state, inst instr, match []int) []state {
|
||||||
// TODO: Once the state is a vector and we can do insert, have inputs always
|
// TODO: Once the state is a vector and we can do insert, have inputs always
|
||||||
// go in order correctly and this "earlier" test is never necessary,
|
// go in order correctly and this "earlier" test is never necessary,
|
||||||
for i := 0; i < l; i++ {
|
for i := 0; i < l; i++ {
|
||||||
if s[i].inst.index() == index && // same instruction
|
if s[i].inst.index() == index && // same instruction
|
||||||
s[i].match[0] < pos { // earlier match already going; lefmost wins
|
s[i].match[0] < pos { // earlier match already going; lefmost wins
|
||||||
return s
|
return s;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if l == cap(s) {
|
if l == cap(s) {
|
||||||
s1 := make([]state, 2*l)[0:l];
|
s1 := make([]state, 2*l)[0:l];
|
||||||
|
|
@ -655,7 +709,7 @@ func addState(s []state, inst instr, match []int) []state {
|
||||||
}
|
}
|
||||||
s = s1;
|
s = s1;
|
||||||
}
|
}
|
||||||
s = s[0:l+1];
|
s = s[0 : l+1];
|
||||||
s[l].inst = inst;
|
s[l].inst = inst;
|
||||||
s[l].match = match;
|
s[l].match = match;
|
||||||
return s;
|
return s;
|
||||||
|
|
@ -672,16 +726,16 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
|
||||||
found := false;
|
found := false;
|
||||||
end := len(str);
|
end := len(str);
|
||||||
if bytes != nil {
|
if bytes != nil {
|
||||||
end = len(bytes)
|
end = len(bytes);
|
||||||
}
|
}
|
||||||
for pos <= end {
|
for pos <= end {
|
||||||
if !found {
|
if !found {
|
||||||
// prime the pump if we haven't seen a match yet
|
// prime the pump if we haven't seen a match yet
|
||||||
match := make([]int, 2*(re.nbra+1));
|
match := make([]int, 2*(re.nbra + 1));
|
||||||
for i := 0; i < len(match); i++ {
|
for i := 0; i < len(match); i++ {
|
||||||
match[i] = -1; // no match seen; catches cases like "a(b)?c" on "ac"
|
match[i] = -1; // no match seen; catches cases like "a(b)?c" on "ac"
|
||||||
}
|
}
|
||||||
match[0] = pos;
|
match[0] = pos;
|
||||||
s[out] = addState(s[out], re.start.next(), match);
|
s[out] = addState(s[out], re.start.next(), match);
|
||||||
}
|
}
|
||||||
in, out = out, in; // old out state is new in state
|
in, out = out, in; // old out state is new in state
|
||||||
|
|
@ -704,27 +758,27 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
|
||||||
switch s[in][i].inst.kind() {
|
switch s[in][i].inst.kind() {
|
||||||
case _BOT:
|
case _BOT:
|
||||||
if pos == 0 {
|
if pos == 0 {
|
||||||
s[in] = addState(s[in], st.inst.next(), st.match)
|
s[in] = addState(s[in], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _EOT:
|
case _EOT:
|
||||||
if pos == end {
|
if pos == end {
|
||||||
s[in] = addState(s[in], st.inst.next(), st.match)
|
s[in] = addState(s[in], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _CHAR:
|
case _CHAR:
|
||||||
if c == st.inst.(*_Char).char {
|
if c == st.inst.(*_Char).char {
|
||||||
s[out] = addState(s[out], st.inst.next(), st.match)
|
s[out] = addState(s[out], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _CHARCLASS:
|
case _CHARCLASS:
|
||||||
if st.inst.(*_CharClass).matches(c) {
|
if st.inst.(*_CharClass).matches(c) {
|
||||||
s[out] = addState(s[out], st.inst.next(), st.match)
|
s[out] = addState(s[out], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _ANY:
|
case _ANY:
|
||||||
if c != endOfFile {
|
if c != endOfFile {
|
||||||
s[out] = addState(s[out], st.inst.next(), st.match)
|
s[out] = addState(s[out], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _NOTNL:
|
case _NOTNL:
|
||||||
if c != endOfFile && c != '\n' {
|
if c != endOfFile && c != '\n' {
|
||||||
s[out] = addState(s[out], st.inst.next(), st.match)
|
s[out] = addState(s[out], st.inst.next(), st.match);
|
||||||
}
|
}
|
||||||
case _BRA:
|
case _BRA:
|
||||||
n := st.inst.(*_Bra).n;
|
n := st.inst.(*_Bra).n;
|
||||||
|
|
@ -732,21 +786,21 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
|
||||||
s[in] = addState(s[in], st.inst.next(), st.match);
|
s[in] = addState(s[in], st.inst.next(), st.match);
|
||||||
case _EBRA:
|
case _EBRA:
|
||||||
n := st.inst.(*_Ebra).n;
|
n := st.inst.(*_Ebra).n;
|
||||||
st.match[2*n+1] = pos;
|
st.match[2*n + 1] = pos;
|
||||||
s[in] = addState(s[in], st.inst.next(), st.match);
|
s[in] = addState(s[in], st.inst.next(), st.match);
|
||||||
case _ALT:
|
case _ALT:
|
||||||
s[in] = addState(s[in], st.inst.(*_Alt).left, st.match);
|
s[in] = addState(s[in], st.inst.(*_Alt).left, st.match);
|
||||||
// give other branch a copy of this match vector
|
// give other branch a copy of this match vector
|
||||||
s1 := make([]int, 2*(re.nbra+1));
|
s1 := make([]int, 2*(re.nbra + 1));
|
||||||
for i := 0; i < len(s1); i++ {
|
for i := 0; i < len(s1); i++ {
|
||||||
s1[i] = st.match[i]
|
s1[i] = st.match[i];
|
||||||
}
|
}
|
||||||
s[in] = addState(s[in], st.inst.next(), s1);
|
s[in] = addState(s[in], st.inst.next(), s1);
|
||||||
case _END:
|
case _END:
|
||||||
// choose leftmost longest
|
// choose leftmost longest
|
||||||
if !found || // first
|
if !found || // first
|
||||||
st.match[0] < final.match[0] || // leftmost
|
st.match[0] < final.match[0] || // leftmost
|
||||||
(st.match[0] == final.match[0] && pos > final.match[1]) { // longest
|
(st.match[0] == final.match[0] && pos > final.match[1]) { // longest
|
||||||
final = st;
|
final = st;
|
||||||
final.match[1] = pos;
|
final.match[1] = pos;
|
||||||
}
|
}
|
||||||
|
|
@ -770,7 +824,7 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
|
||||||
// A negative value means the subexpression did not match any element of the string.
|
// A negative value means the subexpression did not match any element of the string.
|
||||||
// An empty array means "no match".
|
// An empty array means "no match".
|
||||||
func (re *Regexp) ExecuteString(s string) (a []int) {
|
func (re *Regexp) ExecuteString(s string) (a []int) {
|
||||||
return re.doExecute(s, nil, 0)
|
return re.doExecute(s, nil, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -782,21 +836,21 @@ func (re *Regexp) ExecuteString(s string) (a []int) {
|
||||||
// A negative value means the subexpression did not match any element of the slice.
|
// A negative value means the subexpression did not match any element of the slice.
|
||||||
// An empty array means "no match".
|
// An empty array means "no match".
|
||||||
func (re *Regexp) Execute(b []byte) (a []int) {
|
func (re *Regexp) Execute(b []byte) (a []int) {
|
||||||
return re.doExecute("", b, 0)
|
return re.doExecute("", b, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// MatchString returns whether the Regexp matches the string s.
|
// MatchString returns whether the Regexp matches the string s.
|
||||||
// The return value is a boolean: true for match, false for no match.
|
// The return value is a boolean: true for match, false for no match.
|
||||||
func (re *Regexp) MatchString(s string) bool {
|
func (re *Regexp) MatchString(s string) bool {
|
||||||
return len(re.doExecute(s, nil, 0)) > 0
|
return len(re.doExecute(s, nil, 0)) > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// Match returns whether the Regexp matches the byte slice b.
|
// Match returns whether the Regexp matches the byte slice b.
|
||||||
// The return value is a boolean: true for match, false for no match.
|
// The return value is a boolean: true for match, false for no match.
|
||||||
func (re *Regexp) Match(b []byte) bool {
|
func (re *Regexp) Match(b []byte) bool {
|
||||||
return len(re.doExecute("", b, 0)) > 0
|
return len(re.doExecute("", b, 0)) > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -808,15 +862,15 @@ func (re *Regexp) Match(b []byte) bool {
|
||||||
func (re *Regexp) MatchStrings(s string) (a []string) {
|
func (re *Regexp) MatchStrings(s string) (a []string) {
|
||||||
r := re.doExecute(s, nil, 0);
|
r := re.doExecute(s, nil, 0);
|
||||||
if r == nil {
|
if r == nil {
|
||||||
return nil
|
return nil;
|
||||||
}
|
}
|
||||||
a = make([]string, len(r)/2);
|
a = make([]string, len(r)/2);
|
||||||
for i := 0; i < len(r); i += 2 {
|
for i := 0; i < len(r); i += 2 {
|
||||||
if r[i] != -1 { // -1 means no match for this subexpression
|
if r[i] != -1 { // -1 means no match for this subexpression
|
||||||
a[i/2] = s[r[i] : r[i+1]]
|
a[i/2] = s[r[i]:r[i+1]];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// MatchSlices matches the Regexp against the byte slice b.
|
// MatchSlices matches the Regexp against the byte slice b.
|
||||||
|
|
@ -827,15 +881,15 @@ func (re *Regexp) MatchStrings(s string) (a []string) {
|
||||||
func (re *Regexp) MatchSlices(b []byte) (a [][]byte) {
|
func (re *Regexp) MatchSlices(b []byte) (a [][]byte) {
|
||||||
r := re.doExecute("", b, 0);
|
r := re.doExecute("", b, 0);
|
||||||
if r == nil {
|
if r == nil {
|
||||||
return nil
|
return nil;
|
||||||
}
|
}
|
||||||
a = make([][]byte, len(r)/2);
|
a = make([][]byte, len(r)/2);
|
||||||
for i := 0; i < len(r); i += 2 {
|
for i := 0; i < len(r); i += 2 {
|
||||||
if r[i] != -1 { // -1 means no match for this subexpression
|
if r[i] != -1 { // -1 means no match for this subexpression
|
||||||
a[i/2] = b[r[i] : r[i+1]]
|
a[i/2] = b[r[i]:r[i+1]];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// MatchString checks whether a textual regular expression
|
// MatchString checks whether a textual regular expression
|
||||||
|
|
@ -844,9 +898,9 @@ func (re *Regexp) MatchSlices(b []byte) (a [][]byte) {
|
||||||
func MatchString(pattern string, s string) (matched bool, error string) {
|
func MatchString(pattern string, s string) (matched bool, error string) {
|
||||||
re, err := CompileRegexp(pattern);
|
re, err := CompileRegexp(pattern);
|
||||||
if err != "" {
|
if err != "" {
|
||||||
return false, err
|
return false, err;
|
||||||
}
|
}
|
||||||
return re.MatchString(s), ""
|
return re.MatchString(s), "";
|
||||||
}
|
}
|
||||||
|
|
||||||
// Match checks whether a textual regular expression
|
// Match checks whether a textual regular expression
|
||||||
|
|
@ -855,7 +909,7 @@ func MatchString(pattern string, s string) (matched bool, error string) {
|
||||||
func Match(pattern string, b []byte) (matched bool, error string) {
|
func Match(pattern string, b []byte) (matched bool, error string) {
|
||||||
re, err := CompileRegexp(pattern);
|
re, err := CompileRegexp(pattern);
|
||||||
if err != "" {
|
if err != "" {
|
||||||
return false, err
|
return false, err;
|
||||||
}
|
}
|
||||||
return re.Match(b), ""
|
return re.Match(b), "";
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
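Editor's note: the regexp.go hunks above are formatting-only (gofmt semicolons and operator spacing); the API they touch is the pre-Go1 matcher, where ExecuteString/Execute return pairs of match indices, MatchString/Match report a boolean, MatchStrings/MatchSlices return the matched substrings, and the _END case implements the leftmost-longest rule. Purely as orientation for readers of this old code, here is a minimal sketch of the closest equivalents in today's regexp package. This is the modern API, not the code in this commit, and the current engine is leftmost-first rather than leftmost-longest unless CompilePOSIX is used.

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		// Compile returns an error value rather than the error string used in the 2009 code.
		re, err := regexp.Compile(`(([^xyz]*)(d))`)
		if err != nil {
			panic(err)
		}

		// FindStringSubmatchIndex corresponds roughly to the old ExecuteString:
		// pairs of byte offsets for the whole match and each parenthesized group.
		fmt.Println(re.FindStringSubmatchIndex("abcd")) // [0 4 0 4 0 3 3 4]

		// FindStringSubmatch corresponds roughly to the old MatchStrings.
		fmt.Println(re.FindStringSubmatch("abcd")) // [abcd abcd abc d]

		// MatchString survives with the same name.
		fmt.Println(re.MatchString("abcd")) // true

		// The package-level helper also still exists, returning (bool, error).
		ok, err := regexp.MatchString(`a+`, "baaab")
		fmt.Println(ok, err) // true <nil>
	}

The [start0 end0 start1 end1 ...] index layout is the same convention the vec tables in the test file below encode.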
@ -33,22 +33,23 @@ type stringError struct {
|
||||||
re string;
|
re string;
|
||||||
err string;
|
err string;
|
||||||
}
|
}
|
||||||
|
|
||||||
var bad_re = []stringError{
|
var bad_re = []stringError{
|
||||||
stringError{ `*`, ErrBareClosure },
|
stringError{`*`, ErrBareClosure},
|
||||||
stringError{ `(abc`, ErrUnmatchedLpar },
|
stringError{`(abc`, ErrUnmatchedLpar},
|
||||||
stringError{ `abc)`, ErrUnmatchedRpar },
|
stringError{`abc)`, ErrUnmatchedRpar},
|
||||||
stringError{ `x[a-z`, ErrUnmatchedLbkt },
|
stringError{`x[a-z`, ErrUnmatchedLbkt},
|
||||||
stringError{ `abc]`, ErrUnmatchedRbkt },
|
stringError{`abc]`, ErrUnmatchedRbkt},
|
||||||
stringError{ `[z-a]`, ErrBadRange },
|
stringError{`[z-a]`, ErrBadRange},
|
||||||
stringError{ `abc\`, ErrExtraneousBackslash },
|
stringError{`abc\`, ErrExtraneousBackslash},
|
||||||
stringError{ `a**`, ErrBadClosure },
|
stringError{`a**`, ErrBadClosure},
|
||||||
stringError{ `a*+`, ErrBadClosure },
|
stringError{`a*+`, ErrBadClosure},
|
||||||
stringError{ `a??`, ErrBadClosure },
|
stringError{`a??`, ErrBadClosure},
|
||||||
stringError{ `*`, ErrBareClosure },
|
stringError{`*`, ErrBareClosure},
|
||||||
stringError{ `\x`, ErrBadBackslash },
|
stringError{`\x`, ErrBadBackslash},
|
||||||
}
|
}
|
||||||
|
|
||||||
type vec []int;
|
type vec []int
|
||||||
|
|
||||||
type tester struct {
|
type tester struct {
|
||||||
re string;
|
re string;
|
||||||
|
|
@ -56,33 +57,33 @@ type tester struct {
|
||||||
match vec;
|
match vec;
|
||||||
}
|
}
|
||||||
|
|
||||||
var matches = []tester {
|
var matches = []tester{
|
||||||
tester{ ``, "", vec{0,0} },
|
tester{``, "", vec{0, 0}},
|
||||||
tester{ `a`, "a", vec{0,1} },
|
tester{`a`, "a", vec{0, 1}},
|
||||||
tester{ `x`, "y", vec{} },
|
tester{`x`, "y", vec{}},
|
||||||
tester{ `b`, "abc", vec{1,2} },
|
tester{`b`, "abc", vec{1, 2}},
|
||||||
tester{ `.`, "a", vec{0,1} },
|
tester{`.`, "a", vec{0, 1}},
|
||||||
tester{ `.*`, "abcdef", vec{0,6} },
|
tester{`.*`, "abcdef", vec{0, 6}},
|
||||||
tester{ `^abcd$`, "abcd", vec{0,4} },
|
tester{`^abcd$`, "abcd", vec{0, 4}},
|
||||||
tester{ `^bcd'`, "abcdef", vec{} },
|
tester{`^bcd'`, "abcdef", vec{}},
|
||||||
tester{ `^abcd$`, "abcde", vec{} },
|
tester{`^abcd$`, "abcde", vec{}},
|
||||||
tester{ `a+`, "baaab", vec{1,4} },
|
tester{`a+`, "baaab", vec{1, 4}},
|
||||||
tester{ `a*`, "baaab", vec{0,0} },
|
tester{`a*`, "baaab", vec{0, 0}},
|
||||||
tester{ `[a-z]+`, "abcd", vec{0,4} },
|
tester{`[a-z]+`, "abcd", vec{0, 4}},
|
||||||
tester{ `[^a-z]+`, "ab1234cd", vec{2,6} },
|
tester{`[^a-z]+`, "ab1234cd", vec{2, 6}},
|
||||||
tester{ `[a\-\]z]+`, "az]-bcz", vec{0,4} },
|
tester{`[a\-\]z]+`, "az]-bcz", vec{0, 4}},
|
||||||
tester{ `[^\n]+`, "abcd\n", vec{0,4} },
|
tester{`[^\n]+`, "abcd\n", vec{0, 4}},
|
||||||
tester{ `[日本語]+`, "日本語日本語", vec{0,18} },
|
tester{`[日本語]+`, "日本語日本語", vec{0, 18}},
|
||||||
tester{ `()`, "", vec{0,0, 0,0} },
|
tester{`()`, "", vec{0, 0, 0, 0}},
|
||||||
tester{ `(a)`, "a", vec{0,1, 0,1} },
|
tester{`(a)`, "a", vec{0, 1, 0, 1}},
|
||||||
tester{ `(.)(.)`, "日a", vec{0,4, 0,3, 3,4} },
|
tester{`(.)(.)`, "日a", vec{0, 4, 0, 3, 3, 4}},
|
||||||
tester{ `(.*)`, "", vec{0,0, 0,0} },
|
tester{`(.*)`, "", vec{0, 0, 0, 0}},
|
||||||
tester{ `(.*)`, "abcd", vec{0,4, 0,4} },
|
tester{`(.*)`, "abcd", vec{0, 4, 0, 4}},
|
||||||
tester{ `(..)(..)`, "abcd", vec{0,4, 0,2, 2,4} },
|
tester{`(..)(..)`, "abcd", vec{0, 4, 0, 2, 2, 4}},
|
||||||
tester{ `(([^xyz]*)(d))`, "abcd", vec{0,4, 0,4, 0,3, 3,4} },
|
tester{`(([^xyz]*)(d))`, "abcd", vec{0, 4, 0, 4, 0, 3, 3, 4}},
|
||||||
tester{ `((a|b|c)*(d))`, "abcd", vec{0,4, 0,4, 2,3, 3,4} },
|
tester{`((a|b|c)*(d))`, "abcd", vec{0, 4, 0, 4, 2, 3, 3, 4}},
|
||||||
tester{ `(((a|b|c)*)(d))`, "abcd", vec{0,4, 0,4, 0,3, 2,3, 3,4} },
|
tester{`(((a|b|c)*)(d))`, "abcd", vec{0, 4, 0, 4, 0, 3, 2, 3, 3, 4}},
|
||||||
tester{ `a*(|(b))c*`, "aacc", vec{0,4, 2,2, -1,-1} },
|
tester{`a*(|(b))c*`, "aacc", vec{0, 4, 2, 2, -1, -1}},
|
||||||
}
|
}
|
||||||
|
|
||||||
func compileTest(t *T, expr string, error string) *Regexp {
|
func compileTest(t *T, expr string, error string) *Regexp {
|
||||||
|
|
@ -90,7 +91,7 @@ func compileTest(t *T, expr string, error string) *Regexp {
|
||||||
if err != error {
|
if err != error {
|
||||||
t.Error("compiling `", expr, "`; unexpected error: ", err);
|
t.Error("compiling `", expr, "`; unexpected error: ", err);
|
||||||
}
|
}
|
||||||
return re
|
return re;
|
||||||
}
|
}
|
||||||
|
|
||||||
func printVec(t *T, m []int) {
|
func printVec(t *T, m []int) {
|
||||||
|
|
@ -99,7 +100,7 @@ func printVec(t *T, m []int) {
|
||||||
t.Log("\t<no match>");
|
t.Log("\t<no match>");
|
||||||
} else {
|
} else {
|
||||||
for i := 0; i < l; i = i+2 {
|
for i := 0; i < l; i = i+2 {
|
||||||
t.Log("\t", m[i], ",", m[i+1])
|
t.Log("\t", m[i], ",", m[i+1]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -110,7 +111,7 @@ func printStrings(t *T, m []string) {
|
||||||
t.Log("\t<no match>");
|
t.Log("\t<no match>");
|
||||||
} else {
|
} else {
|
||||||
for i := 0; i < l; i = i+2 {
|
for i := 0; i < l; i = i+2 {
|
||||||
t.Logf("\t%q", m[i])
|
t.Logf("\t%q", m[i]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -121,7 +122,7 @@ func printBytes(t *T, b [][]byte) {
|
||||||
t.Log("\t<no match>");
|
t.Log("\t<no match>");
|
||||||
} else {
|
} else {
|
||||||
for i := 0; i < l; i = i+2 {
|
for i := 0; i < l; i = i+2 {
|
||||||
t.Logf("\t%q", b[i])
|
t.Logf("\t%q", b[i]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -129,46 +130,46 @@ func printBytes(t *T, b [][]byte) {
|
||||||
func equal(m1, m2 []int) bool {
|
func equal(m1, m2 []int) bool {
|
||||||
l := len(m1);
|
l := len(m1);
|
||||||
if l != len(m2) {
|
if l != len(m2) {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
for i := 0; i < l; i++ {
|
for i := 0; i < l; i++ {
|
||||||
if m1[i] != m2[i] {
|
if m1[i] != m2[i] {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
func equalStrings(m1, m2 []string) bool {
|
func equalStrings(m1, m2 []string) bool {
|
||||||
l := len(m1);
|
l := len(m1);
|
||||||
if l != len(m2) {
|
if l != len(m2) {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
for i := 0; i < l; i++ {
|
for i := 0; i < l; i++ {
|
||||||
if m1[i] != m2[i] {
|
if m1[i] != m2[i] {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
func equalBytes(m1 [][]byte, m2 []string) bool {
|
func equalBytes(m1 [][]byte, m2 []string) bool {
|
||||||
l := len(m1);
|
l := len(m1);
|
||||||
if l != len(m2) {
|
if l != len(m2) {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
for i := 0; i < l; i++ {
|
for i := 0; i < l; i++ {
|
||||||
if string(m1[i]) != m2[i] {
|
if string(m1[i]) != m2[i] {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
func executeTest(t *T, expr string, str string, match []int) {
|
func executeTest(t *T, expr string, str string, match []int) {
|
||||||
re := compileTest(t, expr, "");
|
re := compileTest(t, expr, "");
|
||||||
if re == nil {
|
if re == nil {
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
m := re.ExecuteString(str);
|
m := re.ExecuteString(str);
|
||||||
if !equal(m, match) {
|
if !equal(m, match) {
|
||||||
|
|
@ -195,21 +196,21 @@ func TestGoodCompile(t *T) {
|
||||||
|
|
||||||
func TestBadCompile(t *T) {
|
func TestBadCompile(t *T) {
|
||||||
for i := 0; i < len(bad_re); i++ {
|
for i := 0; i < len(bad_re); i++ {
|
||||||
compileTest(t, bad_re[i].re, bad_re[i].err)
|
compileTest(t, bad_re[i].re, bad_re[i].err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExecute(t *T) {
|
func TestExecute(t *T) {
|
||||||
for i := 0; i < len(matches); i++ {
|
for i := 0; i < len(matches); i++ {
|
||||||
test := &matches[i];
|
test := &matches[i];
|
||||||
executeTest(t, test.re, test.text, test.match)
|
executeTest(t, test.re, test.text, test.match);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func matchTest(t *T, expr string, str string, match []int) {
|
func matchTest(t *T, expr string, str string, match []int) {
|
||||||
re := compileTest(t, expr, "");
|
re := compileTest(t, expr, "");
|
||||||
if re == nil {
|
if re == nil {
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
m := re.MatchString(str);
|
m := re.MatchString(str);
|
||||||
if m != (len(match) > 0) {
|
if m != (len(match) > 0) {
|
||||||
|
|
@ -225,18 +226,18 @@ func matchTest(t *T, expr string, str string, match []int) {
|
||||||
func TestMatch(t *T) {
|
func TestMatch(t *T) {
|
||||||
for i := 0; i < len(matches); i++ {
|
for i := 0; i < len(matches); i++ {
|
||||||
test := &matches[i];
|
test := &matches[i];
|
||||||
matchTest(t, test.re, test.text, test.match)
|
matchTest(t, test.re, test.text, test.match);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func matchStringsTest(t *T, expr string, str string, match []int) {
|
func matchStringsTest(t *T, expr string, str string, match []int) {
|
||||||
re := compileTest(t, expr, "");
|
re := compileTest(t, expr, "");
|
||||||
if re == nil {
|
if re == nil {
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
strs := make([]string, len(match)/2);
|
strs := make([]string, len(match)/2);
|
||||||
for i := 0; i < len(match); i++ {
|
for i := 0; i < len(match); i++ {
|
||||||
strs[i/2] = str[match[i] : match[i+1]]
|
strs[i/2] = str[match[i]:match[i+1]];
|
||||||
}
|
}
|
||||||
m := re.MatchStrings(str);
|
m := re.MatchStrings(str);
|
||||||
if !equalStrings(m, strs) {
|
if !equalStrings(m, strs) {
|
||||||
|
|
@ -258,14 +259,14 @@ func matchStringsTest(t *T, expr string, str string, match []int) {
|
||||||
func TestMatchStrings(t *T) {
|
func TestMatchStrings(t *T) {
|
||||||
for i := 0; i < len(matches); i++ {
|
for i := 0; i < len(matches); i++ {
|
||||||
test := &matches[i];
|
test := &matches[i];
|
||||||
matchTest(t, test.re, test.text, test.match)
|
matchTest(t, test.re, test.text, test.match);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func matchFunctionTest(t *T, expr string, str string, match []int) {
|
func matchFunctionTest(t *T, expr string, str string, match []int) {
|
||||||
m, err := MatchString(expr, str);
|
m, err := MatchString(expr, str);
|
||||||
if err == "" {
|
if err == "" {
|
||||||
return
|
return;
|
||||||
}
|
}
|
||||||
if m != (len(match) > 0) {
|
if m != (len(match) > 0) {
|
||||||
t.Error("function Match failure on `", expr, "` matching `", str, "`:", m, "should be", len(match) > 0);
|
t.Error("function Match failure on `", expr, "` matching `", str, "`:", m, "should be", len(match) > 0);
|
||||||
|
|
@ -275,6 +276,6 @@ func matchFunctionTest(t *T, expr string, str string, match []int) {
|
||||||
func TestMatchFunction(t *T) {
|
func TestMatchFunction(t *T) {
|
||||||
for i := 0; i < len(matches); i++ {
|
for i := 0; i < len(matches); i++ {
|
||||||
test := &matches[i];
|
test := &matches[i];
|
||||||
matchFunctionTest(t, test.re, test.text, test.match)
|
matchFunctionTest(t, test.re, test.text, test.match);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
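Editor's note: the regexp_test.go hunks reformat a classic table-driven test, a slice of tester cases walked with a plain for loop through compileTest/executeTest. A minimal sketch of the same pattern in today's testing style (hypothetical names, not this file's helpers):

	package demo

	import (
		"regexp"
		"testing"
	)

	// A hypothetical, trimmed-down version of the matches table above.
	var matchCases = []struct {
		re   string
		text string
		want bool
	}{
		{`a+`, "baaab", true},
		{`^abcd$`, "abcde", false},
		{`[^a-z]+`, "ab1234cd", true},
	}

	func TestMatchTable(t *testing.T) {
		for _, tc := range matchCases {
			re, err := regexp.Compile(tc.re)
			if err != nil {
				t.Fatalf("compiling %q: %v", tc.re, err)
			}
			if got := re.MatchString(tc.text); got != tc.want {
				t.Errorf("MatchString(%q, %q) = %v, want %v", tc.re, tc.text, got, tc.want)
			}
		}
	}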
@ -67,13 +67,13 @@ func (t *T) FailNow() {
|
||||||
// Log formats its arguments using default formatting, analogous to Print(),
|
// Log formats its arguments using default formatting, analogous to Print(),
|
||||||
// and records the text in the error log.
|
// and records the text in the error log.
|
||||||
func (t *T) Log(args ...) {
|
func (t *T) Log(args ...) {
|
||||||
t.errors += "\t" + tabify(fmt.Sprintln(args));
|
t.errors += "\t"+tabify(fmt.Sprintln(args));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Log formats its arguments according to the format, analogous to Printf(),
|
// Log formats its arguments according to the format, analogous to Printf(),
|
||||||
// and records the text in the error log.
|
// and records the text in the error log.
|
||||||
func (t *T) Logf(format string, args ...) {
|
func (t *T) Logf(format string, args ...) {
|
||||||
t.errors += "\t" + tabify(fmt.Sprintf(format, args));
|
t.errors += "\t"+tabify(fmt.Sprintf(format, args));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Error is equivalent to Log() followed by Fail().
|
// Error is equivalent to Log() followed by Fail().
|
||||||
|
|
|
||||||
|
|
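Editor's note: the testing.go hunk above only tightens the string concatenation inside T.Log and T.Logf, which append formatted text to the test's error log. The present-day testing package keeps the same shape; a hedged sketch of typical use, for reference only:

	package demo

	import "testing"

	func TestLogging(t *testing.T) {
		v := 42
		t.Log("plain message, formatted like Print") // recorded, shown with -v or on failure
		t.Logf("formatted message: v=%d", v)         // recorded, formatted like Printf
		if v != 42 {
			t.Error("unexpected value:", v) // Error is Log followed by Fail
		}
	}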
@ -211,8 +211,8 @@ func (t *Time) Seconds() int64 {
|
||||||
sec := day * secondsPerDay;
|
sec := day * secondsPerDay;
|
||||||
|
|
||||||
// Add in time elapsed today.
|
// Add in time elapsed today.
|
||||||
sec += int64(t.Hour) * 3600;
|
sec += int64(t.Hour)*3600;
|
||||||
sec += int64(t.Minute) * 60;
|
sec += int64(t.Minute)*60;
|
||||||
sec += int64(t.Second);
|
sec += int64(t.Second);
|
||||||
|
|
||||||
// Convert from seconds since 2001 to seconds since 1970.
|
// Convert from seconds since 2001 to seconds since 1970.
|
||||||
|
|
|
||||||
|
|
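Editor's note: the Time.Seconds hunk is pure spacing; it still converts a broken-down time to seconds by summing whole days, then hour*3600 + minute*60 + second within the day. A worked check with today's time package (an illustration, not this commit's code), using a timestamp taken from the utctests table below:

	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		// 2008-09-17 20:04:26 UTC, built up the same way the old Seconds() does.
		tm := time.Date(2008, time.September, 17, 20, 4, 26, 0, time.UTC)
		fmt.Println(tm.Unix()) // 1221681866, matching the utctests entry below
	}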
@ -5,9 +5,9 @@
|
||||||
package time_test
|
package time_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os";
|
"os";
|
||||||
"testing";
|
"testing";
|
||||||
. "time";
|
. "time";
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
@ -18,35 +18,35 @@ func init() {
|
||||||
}
|
}
|
||||||
|
|
||||||
type TimeTest struct {
|
type TimeTest struct {
|
||||||
seconds int64;
|
seconds int64;
|
||||||
golden Time;
|
golden Time;
|
||||||
}
|
}
|
||||||
|
|
||||||
var utctests = []TimeTest {
|
var utctests = []TimeTest{
|
||||||
TimeTest{0, Time{1970, 1, 1, 0, 0, 0, Thursday, 0, "UTC"}},
|
TimeTest{0, Time{1970, 1, 1, 0, 0, 0, Thursday, 0, "UTC"}},
|
||||||
TimeTest{1221681866, Time{2008, 9, 17, 20, 4, 26, Wednesday, 0, "UTC"}},
|
TimeTest{1221681866, Time{2008, 9, 17, 20, 4, 26, Wednesday, 0, "UTC"}},
|
||||||
TimeTest{-1221681866, Time{1931, 4, 16, 3, 55, 34, Thursday, 0, "UTC"}},
|
TimeTest{-1221681866, Time{1931, 4, 16, 3, 55, 34, Thursday, 0, "UTC"}},
|
||||||
TimeTest{1e18, Time{31688740476, 10, 23, 1, 46, 40, Friday, 0, "UTC"}},
|
TimeTest{1e18, Time{31688740476, 10, 23, 1, 46, 40, Friday, 0, "UTC"}},
|
||||||
TimeTest{-1e18, Time{-31688736537, 3, 10, 22, 13, 20, Tuesday, 0, "UTC"}},
|
TimeTest{-1e18, Time{-31688736537, 3, 10, 22, 13, 20, Tuesday, 0, "UTC"}},
|
||||||
TimeTest{0x7fffffffffffffff, Time{292277026596, 12, 4, 15, 30, 7, Sunday, 0, "UTC"}},
|
TimeTest{0x7fffffffffffffff, Time{292277026596, 12, 4, 15, 30, 7, Sunday, 0, "UTC"}},
|
||||||
TimeTest{-0x8000000000000000, Time{-292277022657, 1, 27, 8, 29, 52, Sunday, 0, "UTC"}}
|
TimeTest{-0x8000000000000000, Time{-292277022657, 1, 27, 8, 29, 52, Sunday, 0, "UTC"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
var localtests = []TimeTest {
|
var localtests = []TimeTest{
|
||||||
TimeTest{0, Time{1969, 12, 31, 16, 0, 0, Wednesday, -8*60*60, "PST"}},
|
TimeTest{0, Time{1969, 12, 31, 16, 0, 0, Wednesday, -8 * 60 * 60, "PST"}},
|
||||||
TimeTest{1221681866, Time{2008, 9, 17, 13, 4, 26, Wednesday, -7*60*60, "PDT"}}
|
TimeTest{1221681866, Time{2008, 9, 17, 13, 4, 26, Wednesday, -7 * 60 * 60, "PDT"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
func same(t, u *Time) bool {
|
func same(t, u *Time) bool {
|
||||||
return t.Year == u.Year
|
return t.Year == u.Year &&
|
||||||
&& t.Month == u.Month
|
t.Month == u.Month &&
|
||||||
&& t.Day == u.Day
|
t.Day == u.Day &&
|
||||||
&& t.Hour == u.Hour
|
t.Hour == u.Hour &&
|
||||||
&& t.Minute == u.Minute
|
t.Minute == u.Minute &&
|
||||||
&& t.Second == u.Second
|
t.Second == u.Second &&
|
||||||
&& t.Weekday == u.Weekday
|
t.Weekday == u.Weekday &&
|
||||||
&& t.ZoneOffset == u.ZoneOffset
|
t.ZoneOffset == u.ZoneOffset &&
|
||||||
&& t.Zone == u.Zone
|
t.Zone == u.Zone;
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSecondsToUTC(t *testing.T) {
|
func TestSecondsToUTC(t *testing.T) {
|
||||||
|
|
@ -82,4 +82,3 @@ func TestSecondsToLocalTime(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
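Editor's note: the time_test.go hunks add trailing commas to the table literals and move the && operators in same() from the start of a line to the end of the previous one, which is the form gofmt enforces. In today's Go the line-leading form would not even compile, because a semicolon is inserted automatically at the end of the first line. A tiny assumed snippet, not from this commit, showing the surviving form:

	package main

	import "fmt"

	func main() {
		a, b := 1, 1
		// Correct: the && ends the line, so the expression continues onto the next one.
		ok := a == b &&
			a > 0
		fmt.Println(ok) // true
		// Starting the second line with && instead would fail to compile in current Go,
		// since automatic semicolon insertion terminates the statement after "a == b".
	}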
@ -34,7 +34,7 @@ func (d *data) read(n int) []byte {
|
||||||
return nil;
|
return nil;
|
||||||
}
|
}
|
||||||
p := d.p[0:n];
|
p := d.p[0:n];
|
||||||
d.p = d.p[n : len(d.p)];
|
d.p = d.p[n:len(d.p)];
|
||||||
return p;
|
return p;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
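Editor's note: this small hunk only removes the spaces inside the slice expression d.p[n : len(d.p)], the idiomatic "drop the first n elements" operation on a byte reader. As a side note, not part of this commit, today's Go lets the upper bound be omitted entirely:

	package main

	import "fmt"

	func main() {
		p := []byte("abcdef")
		n := 2
		head := p[0:n]                       // first n bytes
		p = p[n:]                            // same as p[n:len(p)]: drop the first n bytes
		fmt.Println(string(head), string(p)) // ab cdef
	}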
@ -5,8 +5,8 @@
|
||||||
package unicode_test
|
package unicode_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"testing";
|
"testing";
|
||||||
. "unicode";
|
. "unicode";
|
||||||
)
|
)
|
||||||
|
|
||||||
type T struct {
|
type T struct {
|
||||||
|
|
@ -16,92 +16,91 @@ type T struct {
|
||||||
|
|
||||||
// Hand-chosen tests from Unicode 5.1.0, mostly to discover when new
|
// Hand-chosen tests from Unicode 5.1.0, mostly to discover when new
|
||||||
// scripts and categories arise.
|
// scripts and categories arise.
|
||||||
var inTest = []T {
|
var inTest = []T{
|
||||||
T{0x06e2, "Arabic"},
|
T{0x06e2, "Arabic"},
|
||||||
T{0x0567, "Armenian"},
|
T{0x0567, "Armenian"},
|
||||||
T{0x1b37, "Balinese"},
|
T{0x1b37, "Balinese"},
|
||||||
T{0x09c2, "Bengali"},
|
T{0x09c2, "Bengali"},
|
||||||
T{0x3115, "Bopomofo"},
|
T{0x3115, "Bopomofo"},
|
||||||
T{0x282d, "Braille"},
|
T{0x282d, "Braille"},
|
||||||
T{0x1a1a, "Buginese"},
|
T{0x1a1a, "Buginese"},
|
||||||
T{0x1747, "Buhid"},
|
T{0x1747, "Buhid"},
|
||||||
T{0x156d, "Canadian_Aboriginal"},
|
T{0x156d, "Canadian_Aboriginal"},
|
||||||
T{0x102a9, "Carian"},
|
T{0x102a9, "Carian"},
|
||||||
T{0xaa4d, "Cham"},
|
T{0xaa4d, "Cham"},
|
||||||
T{0x13c2, "Cherokee"},
|
T{0x13c2, "Cherokee"},
|
||||||
T{0x0020, "Common"},
|
T{0x0020, "Common"},
|
||||||
T{0x1d4a5, "Common"},
|
T{0x1d4a5, "Common"},
|
||||||
T{0x2cfc, "Coptic"},
|
T{0x2cfc, "Coptic"},
|
||||||
T{0x12420, "Cuneiform"},
|
T{0x12420, "Cuneiform"},
|
||||||
T{0x1080c, "Cypriot"},
|
T{0x1080c, "Cypriot"},
|
||||||
T{0xa663, "Cyrillic"},
|
T{0xa663, "Cyrillic"},
|
||||||
T{0x10430, "Deseret"},
|
T{0x10430, "Deseret"},
|
||||||
T{0x094a, "Devanagari"},
|
T{0x094a, "Devanagari"},
|
||||||
T{0x1271, "Ethiopic"},
|
T{0x1271, "Ethiopic"},
|
||||||
T{0x10fc, "Georgian"},
|
T{0x10fc, "Georgian"},
|
||||||
T{0x2c40, "Glagolitic"},
|
T{0x2c40, "Glagolitic"},
|
||||||
T{0x10347, "Gothic"},
|
T{0x10347, "Gothic"},
|
||||||
T{0x03ae, "Greek"},
|
T{0x03ae, "Greek"},
|
||||||
T{0x0abf, "Gujarati"},
|
T{0x0abf, "Gujarati"},
|
||||||
T{0x0a24, "Gurmukhi"},
|
T{0x0a24, "Gurmukhi"},
|
||||||
T{0x3028, "Han"},
|
T{0x3028, "Han"},
|
||||||
T{0x11b8, "Hangul"},
|
T{0x11b8, "Hangul"},
|
||||||
T{0x1727, "Hanunoo"},
|
T{0x1727, "Hanunoo"},
|
||||||
T{0x05a0, "Hebrew"},
|
T{0x05a0, "Hebrew"},
|
||||||
T{0x3058, "Hiragana"},
|
T{0x3058, "Hiragana"},
|
||||||
T{0x20e6, "Inherited"},
|
T{0x20e6, "Inherited"},
|
||||||
T{0x0cbd, "Kannada"},
|
T{0x0cbd, "Kannada"},
|
||||||
T{0x30a6, "Katakana"},
|
T{0x30a6, "Katakana"},
|
||||||
T{0xa928, "Kayah_Li"},
|
T{0xa928, "Kayah_Li"},
|
||||||
T{0x10a11, "Kharoshthi"},
|
T{0x10a11, "Kharoshthi"},
|
||||||
T{0x17c6, "Khmer"},
|
T{0x17c6, "Khmer"},
|
||||||
T{0x0eaa, "Lao"},
|
T{0x0eaa, "Lao"},
|
||||||
T{0x1d79, "Latin"},
|
T{0x1d79, "Latin"},
|
||||||
T{0x1c10, "Lepcha"},
|
T{0x1c10, "Lepcha"},
|
||||||
T{0x1930, "Limbu"},
|
T{0x1930, "Limbu"},
|
||||||
T{0x1003c, "Linear_B"},
|
T{0x1003c, "Linear_B"},
|
||||||
T{0x10290, "Lycian"},
|
T{0x10290, "Lycian"},
|
||||||
T{0x10930, "Lydian"},
|
T{0x10930, "Lydian"},
|
||||||
T{0x0d42, "Malayalam"},
|
T{0x0d42, "Malayalam"},
|
||||||
T{0x1822, "Mongolian"},
|
T{0x1822, "Mongolian"},
|
||||||
T{0x104c, "Myanmar"},
|
T{0x104c, "Myanmar"},
|
||||||
T{0x19c3, "New_Tai_Lue"},
|
T{0x19c3, "New_Tai_Lue"},
|
||||||
T{0x07f8, "Nko"},
|
T{0x07f8, "Nko"},
|
||||||
T{0x169b, "Ogham"},
|
T{0x169b, "Ogham"},
|
||||||
T{0x1c6a, "Ol_Chiki"},
|
T{0x1c6a, "Ol_Chiki"},
|
||||||
T{0x10310, "Old_Italic"},
|
T{0x10310, "Old_Italic"},
|
||||||
T{0x103c9, "Old_Persian"},
|
T{0x103c9, "Old_Persian"},
|
||||||
T{0x0b3e, "Oriya"},
|
T{0x0b3e, "Oriya"},
|
||||||
T{0x10491, "Osmanya"},
|
T{0x10491, "Osmanya"},
|
||||||
T{0xa860, "Phags_Pa"},
|
T{0xa860, "Phags_Pa"},
|
||||||
T{0x10918, "Phoenician"},
|
T{0x10918, "Phoenician"},
|
||||||
T{0xa949, "Rejang"},
|
T{0xa949, "Rejang"},
|
||||||
T{0x16c0, "Runic"},
|
T{0x16c0, "Runic"},
|
||||||
T{0xa892, "Saurashtra"},
|
T{0xa892, "Saurashtra"},
|
||||||
T{0x10463, "Shavian"},
|
T{0x10463, "Shavian"},
|
||||||
T{0x0dbd, "Sinhala"},
|
T{0x0dbd, "Sinhala"},
|
||||||
T{0x1ba3, "Sundanese"},
|
T{0x1ba3, "Sundanese"},
|
||||||
T{0xa803, "Syloti_Nagri"},
|
T{0xa803, "Syloti_Nagri"},
|
||||||
T{0x070f, "Syriac"},
|
T{0x070f, "Syriac"},
|
||||||
T{0x170f, "Tagalog"},
|
T{0x170f, "Tagalog"},
|
||||||
T{0x176f, "Tagbanwa"},
|
T{0x176f, "Tagbanwa"},
|
||||||
T{0x1972, "Tai_Le"},
|
T{0x1972, "Tai_Le"},
|
||||||
T{0x0bbf, "Tamil"},
|
T{0x0bbf, "Tamil"},
|
||||||
T{0x0c55, "Telugu"},
|
T{0x0c55, "Telugu"},
|
||||||
T{0x07a7, "Thaana"},
|
T{0x07a7, "Thaana"},
|
||||||
T{0x0e46, "Thai"},
|
T{0x0e46, "Thai"},
|
||||||
T{0x0f36, "Tibetan"},
|
T{0x0f36, "Tibetan"},
|
||||||
T{0x2d55, "Tifinagh"},
|
T{0x2d55, "Tifinagh"},
|
||||||
T{0x10388, "Ugaritic"},
|
T{0x10388, "Ugaritic"},
|
||||||
T{0xa60e, "Vai"},
|
T{0xa60e, "Vai"},
|
||||||
T{0xa216, "Yi"},
|
T{0xa216, "Yi"},
|
||||||
}
|
}
|
||||||
|
|
||||||
var outTest = []T { // not really worth being thorough
|
var outTest = []T{ // not really worth being thorough
|
||||||
T{0x20, "Telugu"}
|
T{0x20, "Telugu"}}
|
||||||
}
|
|
||||||
|
|
||||||
var inCategoryTest = []T {
|
var inCategoryTest = []T{
|
||||||
T{0x0081, "Cc"},
|
T{0x0081, "Cc"},
|
||||||
T{0x17b4, "Cf"},
|
T{0x17b4, "Cf"},
|
||||||
T{0xf0000, "Co"},
|
T{0xf0000, "Co"},
|
||||||
|
|
@ -134,7 +133,7 @@ var inCategoryTest = []T {
|
||||||
T{0x04aa, "letter"},
|
T{0x04aa, "letter"},
|
||||||
}
|
}
|
||||||
|
|
||||||
var inPropTest = []T {
|
var inPropTest = []T{
|
||||||
T{0x0046, "ASCII_Hex_Digit"},
|
T{0x0046, "ASCII_Hex_Digit"},
|
||||||
T{0x200F, "Bidi_Control"},
|
T{0x200F, "Bidi_Control"},
|
||||||
T{0x2212, "Dash"},
|
T{0x2212, "Dash"},
|
||||||
|
|
@ -170,18 +169,18 @@ var inPropTest = []T {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestScripts(t *testing.T) {
|
func TestScripts(t *testing.T) {
|
||||||
notTested := make(map[string] bool);
|
notTested := make(map[string]bool);
|
||||||
for k := range Scripts {
|
for k := range Scripts {
|
||||||
notTested[k] = true
|
notTested[k] = true;
|
||||||
}
|
}
|
||||||
for _, test := range inTest {
|
for _, test := range inTest {
|
||||||
if _, ok := Scripts[test.script]; !ok {
|
if _, ok := Scripts[test.script]; !ok {
|
||||||
t.Fatal(test.script, "not a known script")
|
t.Fatal(test.script, "not a known script");
|
||||||
}
|
}
|
||||||
if !Is(Scripts[test.script], test.rune) {
|
if !Is(Scripts[test.script], test.rune) {
|
||||||
t.Errorf("IsScript(%#x, %s) = false, want true\n", test.rune, test.script);
|
t.Errorf("IsScript(%#x, %s) = false, want true\n", test.rune, test.script);
|
||||||
}
|
}
|
||||||
notTested[test.script] = false, false
|
notTested[test.script] = false, false;
|
||||||
}
|
}
|
||||||
for _, test := range outTest {
|
for _, test := range outTest {
|
||||||
if Is(Scripts[test.script], test.rune) {
|
if Is(Scripts[test.script], test.rune) {
|
||||||
|
|
@ -189,44 +188,44 @@ func TestScripts(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for k := range notTested {
|
for k := range notTested {
|
||||||
t.Error("not tested:", k)
|
t.Error("not tested:", k);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCategories(t *testing.T) {
|
func TestCategories(t *testing.T) {
|
||||||
notTested := make(map[string] bool);
|
notTested := make(map[string]bool);
|
||||||
for k := range Categories {
|
for k := range Categories {
|
||||||
notTested[k] = true
|
notTested[k] = true;
|
||||||
}
|
}
|
||||||
for _, test := range inCategoryTest {
|
for _, test := range inCategoryTest {
|
||||||
if _, ok := Categories[test.script]; !ok {
|
if _, ok := Categories[test.script]; !ok {
|
||||||
t.Fatal(test.script, "not a known category")
|
t.Fatal(test.script, "not a known category");
|
||||||
}
|
}
|
||||||
if !Is(Categories[test.script], test.rune) {
|
if !Is(Categories[test.script], test.rune) {
|
||||||
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
|
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
|
||||||
}
|
}
|
||||||
notTested[test.script] = false, false
|
notTested[test.script] = false, false;
|
||||||
}
|
}
|
||||||
for k := range notTested {
|
for k := range notTested {
|
||||||
t.Error("not tested:", k)
|
t.Error("not tested:", k);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProperties(t *testing.T) {
|
func TestProperties(t *testing.T) {
|
||||||
notTested := make(map[string] bool);
|
notTested := make(map[string]bool);
|
||||||
for k := range Properties {
|
for k := range Properties {
|
||||||
notTested[k] = true
|
notTested[k] = true;
|
||||||
}
|
}
|
||||||
for _, test := range inPropTest {
|
for _, test := range inPropTest {
|
||||||
if _, ok := Properties[test.script]; !ok {
|
if _, ok := Properties[test.script]; !ok {
|
||||||
t.Fatal(test.script, "not a known prop")
|
t.Fatal(test.script, "not a known prop");
|
||||||
}
|
}
|
||||||
if !Is(Properties[test.script], test.rune) {
|
if !Is(Properties[test.script], test.rune) {
|
||||||
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
|
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
|
||||||
}
|
}
|
||||||
notTested[test.script] = false, false
|
notTested[test.script] = false, false;
|
||||||
}
|
}
|
||||||
for k := range notTested {
|
for k := range notTested {
|
||||||
t.Error("not tested:", k)
|
t.Error("not tested:", k);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
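Editor's note: the unicode script_test.go hunks are again formatting-only; the tests check membership of sample runes in the script, category, and property tables via Is. The same lookups exist in today's unicode package; a hedged sketch using runes that also appear in the tables above:

	package main

	import (
		"fmt"
		"unicode"
	)

	func main() {
		// Script, category, and property tables are maps from name to *RangeTable.
		fmt.Println(unicode.Is(unicode.Scripts["Latin"], 'x'))       // true
		fmt.Println(unicode.Is(unicode.Scripts["Hiragana"], 0x3058)) // true, same rune as in the table above
		fmt.Println(unicode.Is(unicode.Categories["Lu"], 'A'))       // true: uppercase letter
		fmt.Println(unicode.Is(unicode.Properties["Dash"], '-'))     // true
	}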
@ -10,28 +10,28 @@ import "unicode" // only needed for a couple of constants
|
||||||
|
|
||||||
// Numbers fundamental to the encoding.
|
// Numbers fundamental to the encoding.
|
||||||
const (
|
const (
|
||||||
RuneError = unicode.ReplacementChar; // the "error" Rune or "replacement character".
|
RuneError = unicode.ReplacementChar; // the "error" Rune or "replacement character".
|
||||||
RuneSelf = 0x80; // characters below Runeself are represented as themselves in a single byte.
|
RuneSelf = 0x80; // characters below Runeself are represented as themselves in a single byte.
|
||||||
UTFMax = 4; // maximum number of bytes of a UTF-8 encoded Unicode character.
|
UTFMax = 4; // maximum number of bytes of a UTF-8 encoded Unicode character.
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
_T1 = 0x00; // 0000 0000
|
_T1 = 0x00; // 0000 0000
|
||||||
_Tx = 0x80; // 1000 0000
|
_Tx = 0x80; // 1000 0000
|
||||||
_T2 = 0xC0; // 1100 0000
|
_T2 = 0xC0; // 1100 0000
|
||||||
_T3 = 0xE0; // 1110 0000
|
_T3 = 0xE0; // 1110 0000
|
||||||
_T4 = 0xF0; // 1111 0000
|
_T4 = 0xF0; // 1111 0000
|
||||||
_T5 = 0xF8; // 1111 1000
|
_T5 = 0xF8; // 1111 1000
|
||||||
|
|
||||||
_Maskx = 0x3F; // 0011 1111
|
_Maskx = 0x3F; // 0011 1111
|
||||||
_Mask2 = 0x1F; // 0001 1111
|
_Mask2 = 0x1F; // 0001 1111
|
||||||
_Mask3 = 0x0F; // 0000 1111
|
_Mask3 = 0x0F; // 0000 1111
|
||||||
_Mask4 = 0x07; // 0000 0111
|
_Mask4 = 0x07; // 0000 0111
|
||||||
|
|
||||||
_Rune1Max = 1<<7 - 1;
|
_Rune1Max = 1<<7 - 1;
|
||||||
_Rune2Max = 1<<11 - 1;
|
_Rune2Max = 1<<11 - 1;
|
||||||
_Rune3Max = 1<<16 - 1;
|
_Rune3Max = 1<<16 - 1;
|
||||||
_Rune4Max = 1<<21 - 1;
|
_Rune4Max = 1<<21 - 1;
|
||||||
)
|
)
|
||||||
|
|
||||||
func decodeRuneInternal(p []byte) (rune, size int, short bool) {
|
func decodeRuneInternal(p []byte) (rune, size int, short bool) {
|
||||||
|
|
@ -43,70 +43,70 @@ func decodeRuneInternal(p []byte) (rune, size int, short bool) {
|
||||||
|
|
||||||
// 1-byte, 7-bit sequence?
|
// 1-byte, 7-bit sequence?
|
||||||
if c0 < _Tx {
|
if c0 < _Tx {
|
||||||
return int(c0), 1, false
|
return int(c0), 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// unexpected continuation byte?
|
// unexpected continuation byte?
|
||||||
if c0 < _T2 {
|
if c0 < _T2 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need first continuation byte
|
// need first continuation byte
|
||||||
if n < 2 {
|
if n < 2 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c1 := p[1];
|
c1 := p[1];
|
||||||
if c1 < _Tx || _T2 <= c1 {
|
if c1 < _Tx || _T2 <= c1 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2-byte, 11-bit sequence?
|
// 2-byte, 11-bit sequence?
|
||||||
if c0 < _T3 {
|
if c0 < _T3 {
|
||||||
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
|
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
|
||||||
if rune <= _Rune1Max {
|
if rune <= _Rune1Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 2, false
|
return rune, 2, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need second continuation byte
|
// need second continuation byte
|
||||||
if n < 3 {
|
if n < 3 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c2 := p[2];
|
c2 := p[2];
|
||||||
if c2 < _Tx || _T2 <= c2 {
|
if c2 < _Tx || _T2 <= c2 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3-byte, 16-bit sequence?
|
// 3-byte, 16-bit sequence?
|
||||||
if c0 < _T4 {
|
if c0 < _T4 {
|
||||||
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
|
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
|
||||||
if rune <= _Rune2Max {
|
if rune <= _Rune2Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 3, false
|
return rune, 3, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need third continuation byte
|
// need third continuation byte
|
||||||
if n < 4 {
|
if n < 4 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c3 := p[3];
|
c3 := p[3];
|
||||||
if c3 < _Tx || _T2 <= c3 {
|
if c3 < _Tx || _T2 <= c3 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 4-byte, 21-bit sequence?
|
// 4-byte, 21-bit sequence?
|
||||||
if c0 < _T5 {
|
if c0 < _T5 {
|
||||||
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
|
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
|
||||||
if rune <= _Rune3Max {
|
if rune <= _Rune3Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 4, false
|
return rune, 4, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// error
|
// error
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
func decodeRuneInStringInternal(s string) (rune, size int, short bool) {
|
func decodeRuneInStringInternal(s string) (rune, size int, short bool) {
|
||||||
|
|
@ -118,83 +118,83 @@ func decodeRuneInStringInternal(s string) (rune, size int, short bool) {
|
||||||
|
|
||||||
// 1-byte, 7-bit sequence?
|
// 1-byte, 7-bit sequence?
|
||||||
if c0 < _Tx {
|
if c0 < _Tx {
|
||||||
return int(c0), 1, false
|
return int(c0), 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// unexpected continuation byte?
|
// unexpected continuation byte?
|
||||||
if c0 < _T2 {
|
if c0 < _T2 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need first continuation byte
|
// need first continuation byte
|
||||||
if n < 2 {
|
if n < 2 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c1 := s[1];
|
c1 := s[1];
|
||||||
if c1 < _Tx || _T2 <= c1 {
|
if c1 < _Tx || _T2 <= c1 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2-byte, 11-bit sequence?
|
// 2-byte, 11-bit sequence?
|
||||||
if c0 < _T3 {
|
if c0 < _T3 {
|
||||||
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
|
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
|
||||||
if rune <= _Rune1Max {
|
if rune <= _Rune1Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 2, false
|
return rune, 2, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need second continuation byte
|
// need second continuation byte
|
||||||
if n < 3 {
|
if n < 3 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c2 := s[2];
|
c2 := s[2];
|
||||||
if c2 < _Tx || _T2 <= c2 {
|
if c2 < _Tx || _T2 <= c2 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3-byte, 16-bit sequence?
|
// 3-byte, 16-bit sequence?
|
||||||
if c0 < _T4 {
|
if c0 < _T4 {
|
||||||
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
|
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
|
||||||
if rune <= _Rune2Max {
|
if rune <= _Rune2Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 3, false
|
return rune, 3, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// need third continuation byte
|
// need third continuation byte
|
||||||
if n < 4 {
|
if n < 4 {
|
||||||
return RuneError, 1, true
|
return RuneError, 1, true;
|
||||||
}
|
}
|
||||||
c3 := s[3];
|
c3 := s[3];
|
||||||
if c3 < _Tx || _T2 <= c3 {
|
if c3 < _Tx || _T2 <= c3 {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 4-byte, 21-bit sequence?
|
// 4-byte, 21-bit sequence?
|
||||||
if c0 < _T5 {
|
if c0 < _T5 {
|
||||||
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
|
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
|
||||||
if rune <= _Rune3Max {
|
if rune <= _Rune3Max {
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
return rune, 4, false
|
return rune, 4, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// error
|
// error
|
||||||
return RuneError, 1, false
|
return RuneError, 1, false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// FullRune reports whether the bytes in p begin with a full UTF-8 encoding of a rune.
|
// FullRune reports whether the bytes in p begin with a full UTF-8 encoding of a rune.
|
||||||
// An invalid encoding is considered a full Rune since it will convert as a width-1 error rune.
|
// An invalid encoding is considered a full Rune since it will convert as a width-1 error rune.
|
||||||
func FullRune(p []byte) bool {
|
func FullRune(p []byte) bool {
|
||||||
_, _, short := decodeRuneInternal(p);
|
_, _, short := decodeRuneInternal(p);
|
||||||
return !short
|
return !short;
|
||||||
}
|
}
|
||||||
|
|
||||||
// FullRuneInString is like FullRune but its input is a string.
|
// FullRuneInString is like FullRune but its input is a string.
|
||||||
func FullRuneInString(s string) bool {
|
func FullRuneInString(s string) bool {
|
||||||
_, _, short := decodeRuneInStringInternal(s);
|
_, _, short := decodeRuneInStringInternal(s);
|
||||||
return !short
|
return !short;
|
||||||
}
|
}
|
||||||
|
|
||||||
// DecodeRune unpacks the first UTF-8 encoding in p and returns the rune and its width in bytes.
|
// DecodeRune unpacks the first UTF-8 encoding in p and returns the rune and its width in bytes.
|
||||||
|
|
@ -233,23 +233,23 @@ func EncodeRune(rune int, p []byte) int {
|
||||||
}
|
}
|
||||||
|
|
||||||
if rune <= _Rune2Max {
|
if rune <= _Rune2Max {
|
||||||
p[0] = _T2 | byte(rune>>6);
|
p[0] = _T2|byte(rune>>6);
|
||||||
p[1] = _Tx | byte(rune)&_Maskx;
|
p[1] = _Tx | byte(rune)&_Maskx;
|
||||||
return 2;
|
return 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
if rune > unicode.MaxRune {
|
if rune > unicode.MaxRune {
|
||||||
rune = RuneError
|
rune = RuneError;
|
||||||
}
|
}
|
||||||
|
|
||||||
if rune <= _Rune3Max {
|
if rune <= _Rune3Max {
|
||||||
p[0] = _T3 | byte(rune>>12);
|
p[0] = _T3|byte(rune>>12);
|
||||||
p[1] = _Tx | byte(rune>>6)&_Maskx;
|
p[1] = _Tx | byte(rune>>6)&_Maskx;
|
||||||
p[2] = _Tx | byte(rune)&_Maskx;
|
p[2] = _Tx | byte(rune)&_Maskx;
|
||||||
return 3;
|
return 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
p[0] = _T4 | byte(rune>>18);
|
p[0] = _T4|byte(rune>>18);
|
||||||
p[1] = _Tx | byte(rune>>12)&_Maskx;
|
p[1] = _Tx | byte(rune>>12)&_Maskx;
|
||||||
p[2] = _Tx | byte(rune>>6)&_Maskx;
|
p[2] = _Tx | byte(rune>>6)&_Maskx;
|
||||||
p[3] = _Tx | byte(rune)&_Maskx;
|
p[3] = _Tx | byte(rune)&_Maskx;
|
||||||
|
|
@ -292,5 +292,5 @@ func RuneCountInString(s string) int {
|
||||||
// an encoded rune. Second and subsequent bytes always have the top
|
// an encoded rune. Second and subsequent bytes always have the top
|
||||||
// two bits set to 10.
|
// two bits set to 10.
|
||||||
func RuneStart(b byte) bool {
|
func RuneStart(b byte) bool {
|
||||||
return b & 0xC0 != 0x80
|
return b&0xC0 != 0x80;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
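Editor's note: the utf8.go hunks only add semicolons and tighten operator spacing in decodeRuneInternal and EncodeRune; the masking logic (_T2/_T3/_T4 lead bytes, _Maskx continuation bytes) is unchanged. A short worked example with today's unicode/utf8 package, showing the same 3-byte case those masks describe (modern API, not this commit's):

	package main

	import (
		"fmt"
		"unicode/utf8"
	)

	func main() {
		s := "日a" // '日' is a 3-byte sequence: 0xE6 0x97 0xA5

		r, size := utf8.DecodeRuneInString(s)
		fmt.Printf("U+%04X width %d\n", r, size) // U+65E5 width 3

		// Encoding goes the other way: a 1110xxxx lead byte, then two 10xxxxxx bytes.
		buf := make([]byte, utf8.UTFMax)
		n := utf8.EncodeRune(buf, r)
		fmt.Printf("% X\n", buf[:n]) // E6 97 A5

		// RuneStart reports whether a byte can begin a rune (i.e. is not 10xxxxxx).
		fmt.Println(utf8.RuneStart(buf[0]), utf8.RuneStart(buf[1])) // true false
	}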
@ -120,7 +120,7 @@ func TestDecodeRune(t *testing.T) {
|
||||||
if rune != RuneError || size != wantsize {
|
if rune != RuneError || size != wantsize {
|
||||||
t.Errorf("DecodeRune(%q) = 0x%04x, %d want 0x%04x, %d", b[0 : len(b)-1], rune, size, RuneError, wantsize);
|
t.Errorf("DecodeRune(%q) = 0x%04x, %d want 0x%04x, %d", b[0 : len(b)-1], rune, size, RuneError, wantsize);
|
||||||
}
|
}
|
||||||
s = m.str[0 : len(m.str) - 1];
|
s = m.str[0 : len(m.str)-1];
|
||||||
rune, size = DecodeRuneInString(s);
|
rune, size = DecodeRuneInString(s);
|
||||||
if rune != RuneError || size != wantsize {
|
if rune != RuneError || size != wantsize {
|
||||||
t.Errorf("DecodeRuneInString(%q) = 0x%04x, %d want 0x%04x, %d", s, rune, size, RuneError, wantsize);
|
t.Errorf("DecodeRuneInString(%q) = 0x%04x, %d want 0x%04x, %d", s, rune, size, RuneError, wantsize);
|
||||||
|
|
|
||||||
|
|
@ -27,6 +27,7 @@ import (
|
||||||
|
|
||||||
// A SyntaxError represents a syntax error in the XML input stream.
|
// A SyntaxError represents a syntax error in the XML input stream.
|
||||||
type SyntaxError string
|
type SyntaxError string
|
||||||
|
|
||||||
func (e SyntaxError) String() string {
|
func (e SyntaxError) String() string {
|
||||||
return "XML syntax error: " + string(e);
|
return "XML syntax error: " + string(e);
|
||||||
}
|
}
|
||||||
|
|
@ -42,8 +43,8 @@ type Name struct {
|
||||||
|
|
||||||
// An Attr represents an attribute in an XML element (Name=Value).
|
// An Attr represents an attribute in an XML element (Name=Value).
|
||||||
type Attr struct {
|
type Attr struct {
|
||||||
Name Name;
|
Name Name;
|
||||||
Value string;
|
Value string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// A Token is an interface holding one of the token types:
|
// A Token is an interface holding one of the token types:
|
||||||
|
|
@ -52,12 +53,12 @@ type Token interface{}
|
||||||
|
|
||||||
// A StartElement represents an XML start element.
|
// A StartElement represents an XML start element.
|
||||||
type StartElement struct {
|
type StartElement struct {
|
||||||
Name Name;
|
Name Name;
|
||||||
Attr []Attr;
|
Attr []Attr;
|
||||||
}
|
}
|
||||||
|
|
||||||
// An EndElement represents an XML end element.
|
// An EndElement represents an XML end element.
|
||||||
type EndElement struct {
|
type EndElement struct {
|
||||||
Name Name;
|
Name Name;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -86,8 +87,8 @@ func (c Comment) Copy() Comment {
|
||||||
|
|
||||||
// A ProcInst represents an XML processing instruction of the form <?target inst?>
|
// A ProcInst represents an XML processing instruction of the form <?target inst?>
|
||||||
type ProcInst struct {
|
type ProcInst struct {
|
||||||
Target string;
|
Target string;
|
||||||
Inst []byte;
|
Inst []byte;
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ProcInst) Copy() ProcInst {
|
func (p ProcInst) Copy() ProcInst {
|
||||||
|
|
@ -104,23 +105,23 @@ func (d Directive) Copy() Directive {
|
||||||
}
|
}
|
||||||
|
|
||||||
type readByter interface {
|
type readByter interface {
|
||||||
ReadByte() (b byte, err os.Error)
|
ReadByte() (b byte, err os.Error);
|
||||||
}
|
}
|
||||||
|
|
||||||
// A Parser represents an XML parser reading a particular input stream.
|
// A Parser represents an XML parser reading a particular input stream.
|
||||||
// The parser assumes that its input is encoded in UTF-8.
|
// The parser assumes that its input is encoded in UTF-8.
|
||||||
type Parser struct {
|
type Parser struct {
|
||||||
r readByter;
|
r readByter;
|
||||||
buf bytes.Buffer;
|
buf bytes.Buffer;
|
||||||
stk *stack;
|
stk *stack;
|
||||||
free *stack;
|
free *stack;
|
||||||
needClose bool;
|
needClose bool;
|
||||||
toClose Name;
|
toClose Name;
|
||||||
nextByte int;
|
nextByte int;
|
||||||
ns map[string]string;
|
ns map[string]string;
|
||||||
err os.Error;
|
err os.Error;
|
||||||
line int;
|
line int;
|
||||||
tmp [32]byte;
|
tmp [32]byte;
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewParser creates a new XML parser reading from r.
|
// NewParser creates a new XML parser reading from r.
|
||||||
|
|
@ -230,14 +231,14 @@ func (p *Parser) translate(n *Name, isElementName bool) {
|
||||||
// ending a given tag are *below* it on the stack, which is
|
// ending a given tag are *below* it on the stack, which is
|
||||||
// more work but forced on us by XML.
|
// more work but forced on us by XML.
|
||||||
type stack struct {
|
type stack struct {
|
||||||
next *stack;
|
next *stack;
|
||||||
kind int;
|
kind int;
|
||||||
name Name;
|
name Name;
|
||||||
ok bool;
|
ok bool;
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
stkStart = iota;
|
stkStart = iota;
|
||||||
stkNs;
|
stkNs;
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -388,7 +389,7 @@ func (p *Parser) RawToken() (Token, os.Error) {
|
||||||
b0 = b;
|
b0 = b;
|
||||||
}
|
}
|
||||||
data := p.buf.Bytes();
|
data := p.buf.Bytes();
|
||||||
data = data[0:len(data)-2]; // chop ?>
|
data = data[0 : len(data)-2]; // chop ?>
|
||||||
return ProcInst{target, data}, nil;
|
return ProcInst{target, data}, nil;
|
||||||
|
|
||||||
case '!':
|
case '!':
|
||||||
|
|
@ -397,8 +398,8 @@ func (p *Parser) RawToken() (Token, os.Error) {
|
||||||
return nil, p.err;
|
return nil, p.err;
|
||||||
}
|
}
|
||||||
switch b {
|
switch b {
|
||||||
case '-': // <!-
|
case '-': // <!-
|
||||||
// Probably <!-- for a comment.
|
// Probably <!-- for a comment.
|
||||||
if b, ok = p.getc(); !ok {
|
if b, ok = p.getc(); !ok {
|
||||||
return nil, p.err;
|
return nil, p.err;
|
||||||
}
|
}
|
||||||
|
|
@ -423,11 +424,11 @@ func (p *Parser) RawToken() (Token, os.Error) {
|
||||||
b0, b1 = b1, b;
|
b0, b1 = b1, b;
|
||||||
}
|
}
|
||||||
data := p.buf.Bytes();
|
data := p.buf.Bytes();
|
||||||
data = data[0:len(data)-3]; // chop -->
|
data = data[0 : len(data)-3]; // chop -->
|
||||||
return Comment(data), nil;
|
return Comment(data), nil;
|
||||||
|
|
||||||
case '[': // <![
|
case '[': // <![
|
||||||
// Probably <![CDATA[.
|
// Probably <![CDATA[.
|
||||||
for i := 0; i < 7; i++ {
|
for i := 0; i < 7; i++ {
|
||||||
if b, ok = p.getc(); !ok {
|
if b, ok = p.getc(); !ok {
|
||||||
return nil, p.err;
|
return nil, p.err;
|
||||||
|
|
@ -465,9 +466,9 @@ func (p *Parser) RawToken() (Token, os.Error) {
|
||||||
p.ungetc(b);
|
p.ungetc(b);
|
||||||
|
|
||||||
var (
|
var (
|
||||||
name Name;
|
name Name;
|
||||||
empty bool;
|
empty bool;
|
||||||
attr []Attr;
|
attr []Attr;
|
||||||
)
|
)
|
||||||
if name, ok = p.nsname(); !ok {
|
if name, ok = p.nsname(); !ok {
|
||||||
if p.err == nil {
|
if p.err == nil {
|
||||||
|
|
@ -506,7 +507,7 @@ func (p *Parser) RawToken() (Token, os.Error) {
|
||||||
}
|
}
|
||||||
attr = nattr;
|
attr = nattr;
|
||||||
}
|
}
|
||||||
attr = attr[0:n+1];
|
attr = attr[0 : n+1];
|
||||||
a := &attr[n];
|
a := &attr[n];
|
||||||
if a.Name, ok = p.nsname(); !ok {
|
if a.Name, ok = p.nsname(); !ok {
|
||||||
if p.err == nil {
|
if p.err == nil {
|
||||||
|
|
@ -591,7 +592,7 @@ func (p *Parser) ungetc(b byte) {
|
||||||
p.nextByte = int(b);
|
p.nextByte = int(b);
|
||||||
}
|
}
|
||||||
|
|
||||||
var entity = map[string]int {
|
var entity = map[string]int{
|
||||||
"lt": '<',
|
"lt": '<',
|
||||||
"gt": '>',
|
"gt": '>',
|
||||||
"amp": '&',
|
"amp": '&',
|
||||||
|
|
@ -688,7 +689,7 @@ Input:
|
||||||
b0, b1 = b1, b;
|
b0, b1 = b1, b;
|
||||||
}
|
}
|
||||||
data := p.buf.Bytes();
|
data := p.buf.Bytes();
|
||||||
data = data[0:len(data)-trunc];
|
data = data[0 : len(data)-trunc];
|
||||||
|
|
||||||
// Must rewrite \r and \r\n into \n.
|
// Must rewrite \r and \r\n into \n.
|
||||||
w := 0;
|
w := 0;
|
||||||
|
|
@ -718,7 +719,7 @@ func (p *Parser) nsname() (name Name, ok bool) {
|
||||||
name.Local = s;
|
name.Local = s;
|
||||||
} else {
|
} else {
|
||||||
name.Space = s[0:i];
|
name.Space = s[0:i];
|
||||||
name.Local = s[i+1:len(s)];
|
name.Local = s[i+1 : len(s)];
|
||||||
}
|
}
|
||||||
return name, true;
|
return name, true;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
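Editor's note: the xml.go hunks adjust struct field layout and slicing style in the 2009 Parser type (RawToken, nsname, the entity map). In the current encoding/xml package the same streaming-token idea survives as Decoder.Token; a hedged sketch of reading the token stream today, not the Parser API shown above:

	package main

	import (
		"encoding/xml"
		"fmt"
		"strings"
	)

	func main() {
		r := strings.NewReader(`<a href="https://example.invalid">hi<!-- c --></a>`)
		dec := xml.NewDecoder(r)
		for {
			tok, err := dec.Token()
			if err != nil { // io.EOF ends the stream
				break
			}
			switch t := tok.(type) {
			case xml.StartElement:
				fmt.Println("start:", t.Name.Local, t.Attr)
			case xml.CharData:
				fmt.Printf("text: %q\n", string(t))
			case xml.Comment:
				fmt.Printf("comment: %q\n", string(t))
			case xml.EndElement:
				fmt.Println("end:", t.Name.Local)
			}
		}
	}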