2009-06-16 10:14:06 -06:00
|
|
|
// Copyright 2009 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2009-10-27 11:34:31 -06:00
|
|
|
"bytes";
|
|
|
|
"flag";
|
|
|
|
"fmt";
|
|
|
|
"go/ast";
|
|
|
|
"go/doc";
|
|
|
|
"go/parser";
|
|
|
|
"go/printer";
|
|
|
|
"go/scanner";
|
|
|
|
"go/token";
|
|
|
|
"http";
|
|
|
|
"io";
|
|
|
|
"log";
|
|
|
|
"os";
|
|
|
|
pathutil "path";
|
|
|
|
"strings";
|
|
|
|
"sync";
|
|
|
|
"template";
|
|
|
|
"time";
|
2009-11-08 22:48:51 -07:00
|
|
|
"unicode";
|
2009-11-08 01:40:43 -07:00
|
|
|
"utf8";
|
2009-06-16 10:14:06 -06:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Support types
|
2009-08-28 12:28:05 -06:00
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// An RWValue wraps a value and permits mutually exclusive
// access to it and records the time the value was last set.
type RWValue struct {
	mutex		sync.RWMutex;	// guards value and timestamp
	value		interface{};	// the wrapped value; type chosen by the caller
	timestamp	int64;		// time of last set(), in seconds since epoch
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// set replaces the wrapped value and records the current time
// as the new timestamp, under the write lock.
func (v *RWValue) set(value interface{}) {
	v.mutex.Lock();
	v.value = value;
	v.timestamp = time.Seconds();
	v.mutex.Unlock();
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// get returns the wrapped value and the time (seconds since epoch)
// it was last set, under the read lock.
func (v *RWValue) get() (interface{}, int64) {
	v.mutex.RLock();
	defer v.mutex.RUnlock();
	return v.value, v.timestamp;
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Globals
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// delayTime is an RWValue holding an int delay (in time units chosen
// by the caller) that can be grown exponentially via backoff.
type delayTime struct {
	RWValue;
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// backoff doubles the stored delay value, capping it at max.
// The value must have been set to an int before the first call.
func (dt *delayTime) backoff(max int) {
	dt.mutex.Lock();
	v := dt.value.(int) * 2;
	if v > max {
		v = max
	}
	dt.value = v;
	dt.mutex.Unlock();
}
|
|
|
|
|
|
|
|
|
|
|
|
var (
	verbose	= flag.Bool("v", false, "verbose mode");

	// file system roots
	goroot		string;	// set in init() from $GOROOT (or $HOME/go), overridable via -goroot
	cmdroot		= flag.String("cmdroot", "src/cmd", "root command source directory (if unrooted, relative to goroot)");
	pkgroot		= flag.String("pkgroot", "src/pkg", "root package source directory (if unrooted, relative to goroot)");
	tmplroot	= flag.String("tmplroot", "lib/godoc", "root template directory (if unrooted, relative to goroot)");

	// layout control
	tabwidth	= flag.Int("tabwidth", 4, "tab width");
)
|
|
|
|
|
|
|
|
|
2009-11-05 23:25:46 -07:00
|
|
|
// fsTree caches the package directory tree; writers store a *Directory,
// readers obtain it (and the last-sync time) via fsTree.get().
var fsTree RWValue	// *Directory tree of packages, updated with each sync
|
2009-11-03 20:40:26 -07:00
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// init determines the default Go root directory ($GOROOT, falling
// back to $HOME/go) and registers the -goroot flag with that default.
func init() {
	goroot = os.Getenv("GOROOT");
	if goroot == "" {
		goroot = pathutil.Join(os.Getenv("HOME"), "go")
	}
	flag.StringVar(&goroot, "goroot", goroot, "Go root directory");
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
2009-10-28 17:19:09 -06:00
|
|
|
// Predicates and small utility functions
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
func isGoFile(dir *os.Dir) bool {
|
2009-10-08 16:14:54 -06:00
|
|
|
return dir.IsRegular() &&
|
|
|
|
!strings.HasPrefix(dir.Name, ".") && // ignore .files
|
2009-11-09 13:07:39 -07:00
|
|
|
pathutil.Ext(dir.Name) == ".go"
|
2009-10-27 11:34:31 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func isPkgFile(dir *os.Dir) bool {
|
|
|
|
return isGoFile(dir) &&
|
2009-11-09 13:07:39 -07:00
|
|
|
!strings.HasSuffix(dir.Name, "_test.go") // ignore test files
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// isPkgDir reports whether dir describes a directory that may contain
// package sources; directories starting with '_' are excluded.
func isPkgDir(dir *os.Dir) bool {
	return dir.IsDirectory() && len(dir.Name) > 0 && dir.Name[0] != '_'
}
|
|
|
|
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// pkgName returns the package name declared in the given Go source
// file, or "" if the file cannot be parsed.
func pkgName(filename string) string {
	file, err := parse(filename, parser.PackageClauseOnly);
	if err != nil || file == nil {
		return ""
	}
	return file.Name.Value;
}
|
|
|
|
|
|
|
|
|
2009-10-28 17:19:09 -06:00
|
|
|
func htmlEscape(s string) string {
|
|
|
|
var buf bytes.Buffer;
|
2009-11-08 22:46:20 -07:00
|
|
|
template.HTMLEscape(&buf, strings.Bytes(s));
|
2009-10-28 17:19:09 -06:00
|
|
|
return buf.String();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-07 14:17:53 -07:00
|
|
|
// firstSentence returns the initial sentence of s: the prefix up to and
// including the first period that is followed by white space and not
// preceded by an upper-case letter (to skip abbreviations like "T.").
// If there is no such period, the first period found is used; if there
// is no period at all, all of s is returned.
func firstSentence(s string) string {
	firstPeriod := -1;	// index+1 of the first period seen
	cut := -1;		// index+1 of the first period followed by white space
	last := 'A';		// rune preceding the current one; 'A' so a leading "." is kept
	for pos, r := range s {
		end := pos + 1;
		if r != '.' {
			last = r;
			continue;
		}
		if firstPeriod < 0 {
			firstPeriod = end
		}
		if end < len(s) && s[end] <= ' ' {
			if cut < 0 {
				cut = end
			}
			if !unicode.IsUpper(last) {
				// not an abbreviation - this ends the sentence
				cut = end;
				break;
			}
		}
		last = r;
	}

	switch {
	case cut >= 0:
		// found a period followed by white space - use it
	case firstPeriod >= 0:
		// use the next best period
		cut = firstPeriod
	default:
		// no period at all, use the entire string
		cut = len(s)
	}

	return s[0:cut];
}
|
|
|
|
|
|
|
|
|
2009-11-02 10:25:39 -07:00
|
|
|
// ----------------------------------------------------------------------------
|
2009-11-02 23:44:01 -07:00
|
|
|
// Package directories
|
2009-11-02 10:25:39 -07:00
|
|
|
|
|
|
|
// A Directory is a node in the tree of package source directories.
type Directory struct {
	Depth	int;		// nesting level, 0 for the tree root
	Path	string;		// includes Name
	Name	string;
	Text	string;		// package documentation, if any
	Dirs	[]*Directory;	// subdirectories
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// newDirTree recursively builds the *Directory tree rooted at path.
// depth is the current nesting level; descent stops at maxDepth.
// The result is nil if the directory contains neither package files
// nor subdirectories with package files.
func newDirTree(path, name string, depth, maxDepth int) *Directory {
	if depth >= maxDepth {
		// return a dummy directory so that the parent directory
		// doesn't get discarded just because we reached the max
		// directory depth
		return &Directory{depth, path, name, "", nil}
	}

	list, _ := io.ReadDir(path);	// ignore errors

	// determine number of subdirectories and package files
	ndirs := 0;
	nfiles := 0;
	text := "";
	for _, d := range list {
		switch {
		case isPkgDir(d):
			ndirs++
		case isPkgFile(d):
			nfiles++;
			if text == "" {
				// no package documentation yet; take the first found
				file, err := parser.ParseFile(pathutil.Join(path, d.Name), nil,
					parser.ParseComments|parser.PackageClauseOnly);
				if err == nil &&
					// Also accept fakePkgName, so we get synopses for commands.
					// Note: This may lead to incorrect results if there is a
					// (left-over) "documentation" package somewhere in a package
					// directory of different name, but this is very unlikely and
					// against current conventions.
					(file.Name.Value == name || file.Name.Value == fakePkgName) &&
					file.Doc != nil {
					// found documentation; extract a synopsis
					text = firstSentence(doc.CommentText(file.Doc))
				}
			}
		}
	}

	// create subdirectory tree
	var dirs []*Directory;
	if ndirs > 0 {
		dirs = make([]*Directory, ndirs);
		i := 0;
		for _, d := range list {
			if isPkgDir(d) {
				// dd may be nil for empty subtrees, so i can end up < ndirs
				dd := newDirTree(pathutil.Join(path, d.Name), d.Name, depth+1, maxDepth);
				if dd != nil {
					dirs[i] = dd;
					i++;
				}
			}
		}
		dirs = dirs[0:i];
	}

	// if there are no package files and no subdirectories
	// (with package files), ignore the directory
	if nfiles == 0 && len(dirs) == 0 {
		return nil
	}

	return &Directory{depth, path, name, text, dirs};
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// newDirectory creates a new package directory tree with at most maxDepth
// levels, anchored at root which is relative to goroot. The result tree
// only contains directories that contain package files or that contain
// subdirectories containing package files (transitively).
//
func newDirectory(root string, maxDepth int) *Directory {
	d, err := os.Lstat(root);
	if err != nil || !isPkgDir(d) {
		// root doesn't exist or isn't a usable package directory
		return nil
	}
	return newDirTree(root, d.Name, 0, maxDepth);
}
|
|
|
|
|
|
|
|
|
|
|
|
func (dir *Directory) walk(c chan<- *Directory, skipRoot bool) {
|
|
|
|
if dir != nil {
|
|
|
|
if !skipRoot {
|
2009-11-09 13:07:39 -07:00
|
|
|
c <- dir
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
for _, d := range dir.Dirs {
|
2009-11-09 13:07:39 -07:00
|
|
|
d.walk(c, false)
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (dir *Directory) iter(skipRoot bool) <-chan *Directory {
|
|
|
|
c := make(chan *Directory);
|
|
|
|
go func() {
|
|
|
|
dir.walk(c, skipRoot);
|
|
|
|
close(c);
|
|
|
|
}();
|
|
|
|
return c;
|
2009-11-02 23:44:01 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// lookup looks for the *Directory for a given path, relative to dir.
// It returns nil if no such directory exists in the tree.
func (dir *Directory) lookup(path string) *Directory {
	path = pathutil.Clean(path);	// no trailing '/'

	if dir == nil || path == "" || path == "." {
		return dir
	}

	dpath, dname := pathutil.Split(path);
	if dpath == "" {
		// directory-local name
		for _, d := range dir.Dirs {
			if dname == d.Name {
				return d
			}
		}
		return nil;
	}

	// resolve the parent path first, then the final component;
	// a failed parent lookup yields a nil receiver, which the
	// guard above turns into a nil result rather than a crash
	return dir.lookup(dpath).lookup(dname);
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// DirEntry describes a directory entry. The Depth and Height values
// are useful for presenting an entry in an indented fashion.
//
type DirEntry struct {
	Depth		int;	// >= 0
	Height		int;	// = DirList.MaxHeight - Depth, > 0
	Path		string;	// includes Name, relative to DirList root
	Name		string;
	Synopsis	string;	// first sentence of the package documentation, if any
}
|
|
|
|
|
|
|
|
|
|
|
|
// A DirList is a flattened directory tree, suitable for rendering
// as an indented table.
type DirList struct {
	MaxHeight	int;	// directory tree height, > 0
	List		[]DirEntry;
}
|
|
|
|
|
|
|
|
|
|
|
|
// listing creates a (linear) directory listing from a directory tree.
// If skipRoot is set, the root directory itself is excluded from the list.
//
func (root *Directory) listing(skipRoot bool) *DirList {
	if root == nil {
		return nil
	}

	// determine number of entries n and maximum height
	n := 0;
	minDepth := 1 << 30;	// infinity
	maxDepth := 0;
	for d := range root.iter(skipRoot) {
		n++;
		if minDepth > d.Depth {
			minDepth = d.Depth
		}
		if maxDepth < d.Depth {
			maxDepth = d.Depth
		}
	}
	maxHeight := maxDepth - minDepth + 1;

	if n == 0 {
		return nil
	}

	// create list
	list := make([]DirEntry, n);
	i := 0;
	for d := range root.iter(skipRoot) {
		p := &list[i];
		p.Depth = d.Depth - minDepth;
		p.Height = maxHeight - p.Depth;
		// the path is relative to root.Path - remove the root.Path
		// prefix (the prefix should always be present but avoid
		// crashes and check)
		path := d.Path;
		if strings.HasPrefix(d.Path, root.Path) {
			path = d.Path[len(root.Path):len(d.Path)]
		}
		// remove leading '/' if any - path must be relative
		if len(path) > 0 && path[0] == '/' {
			path = path[1:len(path)]
		}
		p.Path = path;
		p.Name = d.Name;
		p.Synopsis = d.Text;
		i++;
	}

	return &DirList{maxHeight, list};
}
|
|
|
|
|
|
|
|
|
|
|
|
func listing(dirs []*os.Dir) *DirList {
|
|
|
|
list := make([]DirEntry, len(dirs)+1);
|
|
|
|
list[0] = DirEntry{0, 1, "..", "..", ""};
|
|
|
|
for i, d := range dirs {
|
|
|
|
p := &list[i+1];
|
|
|
|
p.Depth = 0;
|
|
|
|
p.Height = 1;
|
|
|
|
p.Path = d.Name;
|
|
|
|
p.Name = d.Name;
|
|
|
|
}
|
|
|
|
return &DirList{1, list};
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Parsing
|
|
|
|
|
|
|
|
// A single error in the parsed file.
type parseError struct {
	src	[]byte;	// source before error
	line	int;	// line number of error
	msg	string;	// error message
}
|
|
|
|
|
|
|
|
|
|
|
|
// All the errors in the parsed file, plus surrounding source code.
// Each error has a slice giving the source text preceding it
// (starting where the last error occurred). The final element in list[]
// has msg = "", to give the remainder of the source code.
// This data structure is handed to the templates parseerror.txt and parseerror.html.
//
type parseErrors struct {
	filename	string;		// path to file
	list		[]parseError;	// the errors
	src		[]byte;		// the file's entire source code
}
|
|
|
|
|
|
|
|
|
|
|
|
// Parses a file (path) and returns the corresponding AST and
// a sorted list (by file position) of errors, if any.
//
func parse(path string, mode uint) (*ast.File, *parseErrors) {
	src, err := io.ReadFile(path);
	if err != nil {
		log.Stderrf("%v", err);
		// report the read failure as a single synthetic parse error
		errs := []parseError{parseError{nil, 0, err.String()}};
		return nil, &parseErrors{path, errs, nil};
	}

	prog, err := parser.ParseFile(path, src, mode);
	if err != nil {
		var errs []parseError;
		if errors, ok := err.(scanner.ErrorList); ok {
			// convert error list (already sorted)
			// TODO(gri) If the file contains //line comments, the errors
			//           may not be sorted in increasing file offset value
			//           which will lead to incorrect output.
			errs = make([]parseError, len(errors)+1);	// +1 for final fragment of source
			offs := 0;	// start offset of the source fragment preceding the next error
			for i, r := range errors {
				// Should always be true, but check for robustness.
				if 0 <= r.Pos.Offset && r.Pos.Offset <= len(src) {
					errs[i].src = src[offs:r.Pos.Offset];
					offs = r.Pos.Offset;
				}
				errs[i].line = r.Pos.Line;
				errs[i].msg = r.Msg;
			}
			// final entry holds the source after the last error (msg stays "")
			errs[len(errors)].src = src[offs:len(src)];
		} else {
			// single error of unspecified type
			errs = make([]parseError, 2);
			errs[0] = parseError{[]byte{}, 0, err.String()};
			errs[1].src = src;
		}
		return nil, &parseErrors{path, errs, src};
	}

	return prog, nil;
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// HTML formatting support
|
|
|
|
|
|
|
|
// Styler implements a printer.Styler.
type Styler struct {
	linetags	bool;	// if set, emit <a id="L%d"> anchors for each line
	highlight	string;	// identifier to wrap in a highlight span, if non-empty
}
|
|
|
|
|
|
|
|
|
2009-11-03 11:00:57 -07:00
|
|
|
// Use the defaultStyler when there is no specific styler.
// The defaultStyler does not emit line tags since they may
// interfere with tags emitted by templates.
// TODO(gri): Should emit line tags at the beginning of a line;
//            never in the middle of code.
var defaultStyler Styler
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// LineTag returns an anchor tag for the given source line, or no tag
// at all if the styler was not configured to emit line tags.
func (s *Styler) LineTag(line int) (text []byte, tag printer.HTMLTag) {
	if s.linetags {
		tag = printer.HTMLTag{fmt.Sprintf(`<a id="L%d">`, line), "</a>"}
	}
	return;
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// Comment styles a source comment line by wrapping it in a
// class="comment" span.
func (s *Styler) Comment(c *ast.Comment, line []byte) (text []byte, tag printer.HTMLTag) {
	text = line;
	// minimal syntax-coloring of comments for now - people will want more
	// (don't do anything more until there's a button to turn it on/off)
	tag = printer.HTMLTag{`<span class="comment">`, "</span>"};
	return;
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// BasicLit emits a basic literal unstyled.
func (s *Styler) BasicLit(x *ast.BasicLit) (text []byte, tag printer.HTMLTag) {
	text = x.Value;
	return;
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// Ident emits an identifier, wrapping it in a highlight span when it
// matches the styler's configured highlight name.
func (s *Styler) Ident(id *ast.Ident) (text []byte, tag printer.HTMLTag) {
	text = strings.Bytes(id.Value);
	if s.highlight == id.Value {
		tag = printer.HTMLTag{"<span class=highlight>", "</span>"}
	}
	return;
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// Token emits a token unstyled.
func (s *Styler) Token(tok token.Token) (text []byte, tag printer.HTMLTag) {
	text = strings.Bytes(tok.String());
	return;
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Templates
|
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write an AST-node to w; optionally html-escaped.
func writeNode(w io.Writer, node interface{}, html bool, styler printer.Styler) {
	mode := printer.UseSpaces;
	if html {
		mode |= printer.GenHTML
	}
	(&printer.Config{mode, *tabwidth, styler}).Fprint(w, node);
}
|
|
|
|
|
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write text to w; optionally html-escaped.
|
|
|
|
func writeText(w io.Writer, text []byte, html bool) {
|
|
|
|
if html {
|
2009-11-08 22:46:20 -07:00
|
|
|
template.HTMLEscape(w, text);
|
2009-07-31 19:04:53 -06:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
w.Write(text);
|
|
|
|
}
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write anything to w; optionally html-escaped.
func writeAny(w io.Writer, x interface{}, html bool) {
	switch v := x.(type) {
	case []byte:
		writeText(w, v, html)
	case string:
		writeText(w, strings.Bytes(v), html)
	case ast.Decl:
		writeNode(w, v, html, &defaultStyler)
	case ast.Expr:
		writeNode(w, v, html, &defaultStyler)
	default:
		if html {
			// format into a buffer first so the result can be escaped
			var buf bytes.Buffer;
			fmt.Fprint(&buf, x);
			writeText(w, buf.Bytes(), true);
		} else {
			fmt.Fprint(w, x)
		}
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "html" format.
func htmlFmt(w io.Writer, x interface{}, format string) {
	writeAny(w, x, true)
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "html-comment" format.
func htmlCommentFmt(w io.Writer, x interface{}, format string) {
	// render x as plain text first, then convert comment text to HTML
	var buf bytes.Buffer;
	writeAny(&buf, x, false);
	doc.ToHTML(w, buf.Bytes());	// does html-escaping
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "" (default) format.
func textFmt(w io.Writer, x interface{}, format string) {
	writeAny(w, x, false)
}
|
|
|
|
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// removePrefix returns s with the leading prefix removed;
// s is returned unchanged if it does not start with prefix.
func removePrefix(s, prefix string) string {
	if !strings.HasPrefix(s, prefix) {
		return s
	}
	return s[len(prefix):len(s)];
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "path" format.
func pathFmt(w io.Writer, x interface{}, format string) {
	// TODO(gri): Need to find a better solution for this.
	//            This will not work correctly if *cmdroot
	//            or *pkgroot change.
	writeAny(w, removePrefix(x.(string), "src"), true)
}
|
|
|
|
|
|
|
|
|
2009-08-03 10:53:00 -06:00
|
|
|
// Template formatter for "link" format.
// Writes a "/file.go#L123"-style URL fragment for any value that
// carries a valid token position; writes nothing otherwise.
func linkFmt(w io.Writer, x interface{}, format string) {
	type Positioner interface {
		Pos() token.Position;
	}
	if node, ok := x.(Positioner); ok {
		pos := node.Pos();
		if pos.IsValid() {
			// line id's in html-printed source are of the
			// form "L%d" where %d stands for the line number
			fmt.Fprintf(w, "/%s#L%d", htmlEscape(pos.Filename), pos.Line)
		}
	}
}
|
|
|
|
|
|
|
|
|
2009-11-08 21:17:04 -07:00
|
|
|
// The strings in infoKinds must be properly html-escaped.
// Indexed by SpotKind; used by infoKindFmt to label search results.
var infoKinds = [nKinds]string{
	PackageClause: "package clause",
	ImportDecl: "import decl",
	ConstDecl: "const decl",
	TypeDecl: "type decl",
	VarDecl: "var decl",
	FuncDecl: "func decl",
	MethodDecl: "method decl",
	Use: "use",
}
|
|
|
|
|
|
|
|
|
2009-11-08 21:17:04 -07:00
|
|
|
// Template formatter for "infoKind" format.
|
|
|
|
func infoKindFmt(w io.Writer, x interface{}, format string) {
|
2009-11-09 13:07:39 -07:00
|
|
|
fmt.Fprintf(w, infoKinds[x.(SpotKind)]) // infoKind entries are html-escaped
|
2009-10-27 11:34:31 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "infoLine" format.
// Writes the source line number of a search hit; for index entries the
// line must first be resolved through the snippet table.
func infoLineFmt(w io.Writer, x interface{}, format string) {
	info := x.(SpotInfo);
	line := info.Lori();	// line number or index, depending on info
	if info.IsIndex() {
		index, _ := searchIndex.get();
		line = index.(*Index).Snippet(line).Line;
	}
	fmt.Fprintf(w, "%d", line);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "infoSnippet" format.
// Writes the HTML snippet for a search hit, or an alert message if
// no snippet is available (e.g. the hit is a plain line reference).
func infoSnippetFmt(w io.Writer, x interface{}, format string) {
	info := x.(SpotInfo);
	text := `<span class="alert">no snippet text available</span>`;
	if info.IsIndex() {
		index, _ := searchIndex.get();
		// no escaping of snippet text needed;
		// snippet text is escaped when generated
		text = index.(*Index).Snippet(info.Lori()).Text;
	}
	fmt.Fprint(w, text);
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// Template formatter for "padding" format.
|
|
|
|
func paddingFmt(w io.Writer, x interface{}, format string) {
|
|
|
|
for i := x.(int); i > 0; i-- {
|
2009-11-09 13:07:39 -07:00
|
|
|
fmt.Fprint(w, `<td width="25"></td>`)
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "time" format.
// Converts a file timestamp in nanoseconds to local time, html-escaped.
func timeFmt(w io.Writer, x interface{}, format string) {
	// note: os.Dir.Mtime_ns is in uint64 in ns!
	template.HTMLEscape(w, strings.Bytes(time.SecondsToLocalTime(int64(x.(uint64)/1e9)).String()))
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// fmap maps template format names to their formatter functions;
// it is shared by all templates read via readTemplate.
var fmap = template.FormatterMap{
	"": textFmt,
	"html": htmlFmt,
	"html-comment": htmlCommentFmt,
	"path": pathFmt,
	"link": linkFmt,
	"infoKind": infoKindFmt,
	"infoLine": infoLineFmt,
	"infoSnippet": infoSnippetFmt,
	"padding": paddingFmt,
	"time": timeFmt,
}
|
|
|
|
|
|
|
|
|
|
|
|
// readTemplate reads and parses the named template file from *tmplroot.
// Any failure is fatal: godoc cannot serve pages without its templates.
func readTemplate(name string) *template.Template {
	path := pathutil.Join(*tmplroot, name);
	data, err := io.ReadFile(path);
	if err != nil {
		log.Exitf("ReadFile %s: %v", path, err)
	}
	t, err := template.Parse(string(data), fmap);
	if err != nil {
		log.Exitf("%s: %v", name, err)
	}
	return t;
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// The pre-parsed page templates; populated by readTemplates after
// flag processing (see the comment there).
var (
	dirlistHTML,
	godocHTML,
	packageHTML,
	packageText,
	parseerrorHTML,
	parseerrorText,
	searchHTML *template.Template;
)
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
// readTemplates loads all page templates from the template directory.
func readTemplates() {
	// have to delay until after flags processing,
	// so that main has chdir'ed to goroot.
	dirlistHTML = readTemplate("dirlist.html");
	godocHTML = readTemplate("godoc.html");
	packageHTML = readTemplate("package.html");
	packageText = readTemplate("package.txt");
	parseerrorHTML = readTemplate("parseerror.html");
	parseerrorText = readTemplate("parseerror.txt");
	searchHTML = readTemplate("search.html");
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Generic HTML wrapper
|
|
|
|
|
2009-10-27 11:34:31 -06:00
|
|
|
// servePage wraps content in the standard godoc page frame
// (godoc.html template) and writes the result to c.
func servePage(c *http.Conn, title, query string, content []byte) {
	type Data struct {
		Title		string;
		Timestamp	uint64;	// int64 to be compatible with os.Dir.Mtime_ns
		Query		string;
		Content		[]byte;
	}

	// show the time of the last fsTree update (i.e. last sync)
	_, ts := fsTree.get();
	d := Data{
		Title: title,
		Timestamp: uint64(ts) * 1e9,	// timestamp in ns
		Query: query,
		Content: content,
	};

	if err := godocHTML.Execute(&d, c); err != nil {
		log.Stderrf("godocHTML.Execute: %s", err)
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// serveText writes text to c as a UTF-8 plain-text response.
func serveText(c *http.Conn, text []byte) {
	c.SetHeader("content-type", "text/plain; charset=utf-8");
	c.Write(text);
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Files
|
|
|
|
|
2009-10-01 15:08:00 -06:00
|
|
|
// HTML comment delimiters used by commentText.
var (
	tagBegin	= []byte("<!--");
	tagEnd		= []byte("-->");
)

// commentText returns the text of the first HTML comment in src.
func commentText(src []byte) (text string) {
	begin := bytes.Index(src, tagBegin);
	end := bytes.Index(src, tagEnd);
	if begin < 0 || end < begin+len(tagBegin) {
		return ""
	}
	return string(bytes.TrimSpace(src[begin+len(tagBegin) : end]));
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
// serveHTMLDoc serves the HTML document stored at path, using the
// document's first HTML comment (if any) as the page title. The Go
// language spec additionally gets its EBNF productions linkified.
func serveHTMLDoc(c *http.Conn, r *http.Request, path string) {
	// get HTML body contents
	src, err := io.ReadFile(path);
	if err != nil {
		log.Stderrf("%v", err);
		http.NotFound(c, r);
		return;
	}

	// if it's the language spec, add tags to EBNF productions
	if strings.HasSuffix(path, "go_spec.html") {
		var buf bytes.Buffer;
		linkify(&buf, src);
		src = buf.Bytes();
	}

	// first HTML comment of the document is used as the page title
	title := commentText(src);
	servePage(c, title, "", src);
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// serveParseErrors renders the given parse errors via the
// parseerrorHTML template and serves them as an HTML page.
func serveParseErrors(c *http.Conn, errors *parseErrors) {
	// format errors
	var buf bytes.Buffer;
	if err := parseerrorHTML.Execute(errors, &buf); err != nil {
		log.Stderrf("parseerrorHTML.Execute: %s", err)
	}
	servePage(c, "Parse errors in source file "+errors.filename, "", buf.Bytes());
}
|
|
|
|
|
|
|
|
|
2009-11-08 01:40:43 -07:00
|
|
|
// serveGoSource parses the Go source file at path (including
// comments) and serves it as syntax-decorated HTML using the
// given styler. Parse errors are rendered instead of the source.
func serveGoSource(c *http.Conn, r *http.Request, path string, styler printer.Styler) {
	prog, errors := parse(path, parser.ParseComments);
	if errors != nil {
		serveParseErrors(c, errors);
		return;
	}

	// pretty-print the AST into an HTML <pre> block
	var buf bytes.Buffer;
	fmt.Fprintln(&buf, "<pre>");
	writeNode(&buf, prog, true, styler);
	fmt.Fprintln(&buf, "</pre>");

	servePage(c, "Source file "+r.URL.Path, "", buf.Bytes());
}
|
|
|
|
|
|
|
|
|
2009-11-07 22:12:46 -07:00
|
|
|
func redirect(c *http.Conn, r *http.Request) (redirected bool) {
|
2009-11-08 22:46:20 -07:00
|
|
|
if canonical := pathutil.Clean(r.URL.Path) + "/"; r.URL.Path != canonical {
|
2009-11-07 22:12:46 -07:00
|
|
|
http.Redirect(c, canonical, http.StatusMovedPermanently);
|
|
|
|
redirected = true;
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 01:40:43 -07:00
|
|
|
// TODO(gri): Should have a mapping from extension to handler, eventually.

// textExt[x] is true if the extension x indicates a text file, and false otherwise.
// Listed-but-false extensions are explicitly excluded from the
// serveTextFile path; absent extensions fall back to content sniffing
// in isTextFile.
var textExt = map[string]bool{
	".css": false, // must be served raw
	".js": false, // must be served raw
}
|
|
|
|
|
|
|
|
|
|
|
|
func isTextFile(path string) bool {
|
|
|
|
// if the extension is known, use it for decision making
|
|
|
|
if isText, found := textExt[pathutil.Ext(path)]; found {
|
2009-11-09 13:07:39 -07:00
|
|
|
return isText
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// the extension is not known; read an initial chunk of
|
|
|
|
// file and check if it looks like correct UTF-8; if it
|
|
|
|
// does, it's probably a text file
|
|
|
|
f, err := os.Open(path, os.O_RDONLY, 0);
|
|
|
|
if err != nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
var buf [1024]byte;
|
|
|
|
n, err := f.Read(&buf);
|
|
|
|
if err != nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
s := string(buf[0:n]);
|
|
|
|
n -= utf8.UTFMax; // make sure there's enough bytes for a complete unicode char
|
|
|
|
for i, c := range s {
|
|
|
|
if i > n {
|
2009-11-09 13:07:39 -07:00
|
|
|
break
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
if c == 0xFFFD || c < ' ' && c != '\n' && c != '\t' {
|
|
|
|
// decoding error or control character - not a text file
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// likely a text file
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func serveTextFile(c *http.Conn, r *http.Request, path string) {
|
|
|
|
src, err := io.ReadFile(path);
|
|
|
|
if err != nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
log.Stderrf("serveTextFile: %s", err)
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
var buf bytes.Buffer;
|
|
|
|
fmt.Fprintln(&buf, "<pre>");
|
2009-11-08 22:46:20 -07:00
|
|
|
template.HTMLEscape(&buf, src);
|
2009-11-08 01:40:43 -07:00
|
|
|
fmt.Fprintln(&buf, "</pre>");
|
|
|
|
|
2009-11-09 22:09:34 -07:00
|
|
|
servePage(c, "Text file "+path, "", buf.Bytes());
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// serveDirectory serves a directory listing for path, rendered
// through the dirlistHTML template. The request is first redirected
// to its canonical form if necessary.
func serveDirectory(c *http.Conn, r *http.Request, path string) {
	if redirect(c, r) {
		return
	}

	list, err := io.ReadDir(path);
	if err != nil {
		http.NotFound(c, r);
		return;
	}

	var buf bytes.Buffer;
	if err := dirlistHTML.Execute(list, &buf); err != nil {
		log.Stderrf("dirlistHTML.Execute: %s", err)
	}

	servePage(c, "Directory "+path, "", buf.Bytes());
}
|
|
|
|
|
|
|
|
|
2009-10-08 16:14:54 -06:00
|
|
|
// fileServer handles all requests not picked off by serveFile's
// special cases, rooted at the current working directory.
var fileServer = http.FileServer(".", "")
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-10-01 15:08:00 -06:00
|
|
|
// serveFile is the top-level handler for "/": it dispatches by URL
// path and file extension to the HTML-doc, Go-source, directory, and
// text-file handlers, and hands everything else to fileServer.
func serveFile(c *http.Conn, r *http.Request) {
	path := pathutil.Join(".", r.URL.Path);

	// pick off special cases and hand the rest to the standard file server
	switch ext := pathutil.Ext(path); {
	case r.URL.Path == "/":
		// landing page
		serveHTMLDoc(c, r, "doc/root.html");
		return;

	case r.URL.Path == "/doc/root.html":
		// hide landing page from its real name
		http.NotFound(c, r);
		return;

	case ext == ".html":
		serveHTMLDoc(c, r, path);
		return;

	case ext == ".go":
		// serve Go source with line tags and optional identifier highlighting (?h=...)
		serveGoSource(c, r, path, &Styler{linetags: true, highlight: r.FormValue("h")});
		return;
	}

	dir, err := os.Lstat(path);
	if err != nil {
		http.NotFound(c, r);
		return;
	}

	if dir != nil && dir.IsDirectory() {
		serveDirectory(c, r, path);
		return;
	}

	if isTextFile(path) {
		serveTextFile(c, r, path);
		return;
	}

	// anything else (binaries, css, js, ...) is served raw
	fileServer.ServeHTTP(c, r);
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Packages
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// Package name used for commands that have non-identifier names.
const fakePkgName = "documentation"
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
|
2009-07-29 18:01:09 -06:00
|
|
|
// PageInfo describes everything needed to render a package or
// command documentation page.
type PageInfo struct {
	PDoc *doc.PackageDoc; // nil if no package found
	Dirs *DirList; // nil if no directory information found
	IsPkg bool; // false if this is not documenting a real package
}
|
|
|
|
|
|
|
|
|
|
|
|
// httpHandler serves documentation pages for a file system subtree
// mounted under a URL pattern (e.g. "/pkg/" -> *pkgroot).
type httpHandler struct {
	pattern string; // url pattern; e.g. "/pkg/"
	fsRoot string; // file system root to which the pattern is mapped
	isPkg bool; // true if this handler serves real package documentation (as opposed to command documentation)
}
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
|
2009-07-29 18:01:09 -06:00
|
|
|
// getPageInfo returns the PageInfo for a given package directory.
// If there is no corresponding package in the directory,
// PageInfo.PDoc is nil. If there are no subdirectories,
// PageInfo.Dirs is nil.
//
func (h *httpHandler) getPageInfo(path string) PageInfo {
	// the path is relative to h.fsroot
	dirname := pathutil.Join(h.fsRoot, path);

	// the package name is the directory name within its parent
	// (use dirname instead of path because dirname is clean; i.e. has no trailing '/')
	_, pkgname := pathutil.Split(dirname);

	// filter function to select the desired .go files
	filter := func(d *os.Dir) bool {
		if isPkgFile(d) {
			// Some directories contain main packages: Only accept
			// files that belong to the expected package so that
			// parser.ParsePackage doesn't return "multiple packages
			// found" errors.
			// Additionally, accept the special package name
			// fakePkgName if we are looking at cmd documentation.
			name := pkgName(dirname + "/" + d.Name);
			return name == pkgname || h.fsRoot == *cmdroot && name == fakePkgName;
		}
		return false;
	};

	// get package AST
	pkg, err := parser.ParsePackage(dirname, filter, parser.ParseComments);
	if err != nil {
		// TODO: parse errors should be shown instead of an empty directory
		log.Stderrf("parser.parsePackage: %s", err)
	}

	// compute package documentation
	var pdoc *doc.PackageDoc;
	if pkg != nil {
		ast.PackageExports(pkg);
		pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(path));	// no trailing '/' in importpath
	}

	// get directory information
	var dir *Directory;
	if tree, _ := fsTree.get(); tree != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(dirname)
	} else {
		// no directory tree present (either early after startup
		// or command-line mode); compute one level for this page
		dir = newDirectory(dirname, 1)
	}

	// NOTE(review): if both lookup and newDirectory can return nil,
	// dir.listing would dereference nil here - confirm their contracts.
	return PageInfo{pdoc, dir.listing(true), h.isPkg};
}
|
|
|
|
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// ServeHTTP renders the documentation page for the directory named by
// the request path (relative to h.pattern). With ?f=text the page is
// served as plain text instead of HTML.
func (h *httpHandler) ServeHTTP(c *http.Conn, r *http.Request) {
	if redirect(c, r) {
		return
	}

	// strip the handler's URL prefix to get the fs-relative path
	path := r.URL.Path;
	path = path[len(h.pattern):len(path)];
	info := h.getPageInfo(path);

	var buf bytes.Buffer;
	if r.FormValue("f") == "text" {
		// plain-text variant requested
		if err := packageText.Execute(info, &buf); err != nil {
			log.Stderrf("packageText.Execute: %s", err)
		}
		serveText(c, buf.Bytes());
		return;
	}

	if err := packageHTML.Execute(info, &buf); err != nil {
		log.Stderrf("packageHTML.Execute: %s", err)
	}

	if path == "" {
		path = "."	// don't display an empty path
	}
	// choose a title depending on whether this documents a
	// directory, a package, or a command
	title := "Directory " + path;
	if info.PDoc != nil {
		switch {
		case h.isPkg:
			title = "Package " + info.PDoc.PackageName
		case info.PDoc.PackageName == fakePkgName:
			// assume that the directory name is the command name
			_, pkgname := pathutil.Split(pathutil.Clean(path));
			title = "Command " + pkgname;
		default:
			title = "Command " + info.PDoc.PackageName
		}
	}

	servePage(c, title, "", buf.Bytes());
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Search
|
|
|
|
|
|
|
|
// searchIndex holds the current *Index (built by the indexer
// goroutine) together with its build timestamp.
var searchIndex RWValue
|
|
|
|
|
|
|
|
// SearchResult is the data passed to the searchHTML template.
type SearchResult struct {
	Query string;
	Hit *LookupResult;
	Alt *AltWords;
	Illegal bool;
	Accurate bool;
}
|
|
|
|
|
|
|
|
// search handles "/search?q=...": it looks the query up in the
// current search index (if one exists) and renders the results via
// the searchHTML template.
func search(c *http.Conn, r *http.Request) {
	query := r.FormValue("q");
	var result SearchResult;

	if index, timestamp := searchIndex.get(); index != nil {
		result.Query = query;
		result.Hit, result.Alt, result.Illegal = index.(*Index).Lookup(query);
		// the result is accurate only if the index is at least as
		// new as the current file system tree
		_, ts := fsTree.get();
		result.Accurate = timestamp >= ts;
	}

	var buf bytes.Buffer;
	if err := searchHTML.Execute(result, &buf); err != nil {
		log.Stderrf("searchHTML.Execute: %s", err)
	}

	var title string;
	if result.Hit != nil {
		title = fmt.Sprintf(`Results for query %q`, query)
	} else {
		title = fmt.Sprintf(`No results found for query %q`, query)
	}

	servePage(c, title, query, buf.Bytes());
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Server
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// Handlers for command and package documentation.
// NOTE(review): *cmdroot and *pkgroot are dereferenced at package
// initialization, i.e. before flag.Parse runs - confirm these flags
// are not expected to be overridable from the command line here.
var (
	cmdHandler = httpHandler{"/cmd/", *cmdroot, false};
	pkgHandler = httpHandler{"/pkg/", *pkgroot, true};
)
|
|
|
|
|
|
|
|
|
2009-10-30 11:58:53 -06:00
|
|
|
func registerPublicHandlers(mux *http.ServeMux) {
|
2009-11-03 20:40:26 -07:00
|
|
|
mux.Handle(cmdHandler.pattern, &cmdHandler);
|
|
|
|
mux.Handle(pkgHandler.pattern, &pkgHandler);
|
2009-10-30 11:58:53 -06:00
|
|
|
mux.Handle("/search", http.HandlerFunc(search));
|
|
|
|
mux.Handle("/", http.HandlerFunc(serveFile));
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-10-30 11:58:53 -06:00
|
|
|
// Indexing goroutine. Periodically (once a minute) rebuilds the
// search index whenever the file system tree is newer than the
// current index; runs forever.
func indexer() {
	for {
		_, ts := fsTree.get();
		if _, timestamp := searchIndex.get(); timestamp < ts {
			// index possibly out of date - make a new one
			// (could use a channel to send an explicit signal
			// from the sync goroutine, but this solution is
			// more decoupled, trivial, and works well enough)
			start := time.Nanoseconds();
			index := NewIndex(".");
			stop := time.Nanoseconds();
			searchIndex.set(index);
			if *verbose {
				// elapsed time in seconds, ms resolution
				secs := float64((stop-start)/1e6) / 1e3;
				nwords, nspots := index.Size();
				log.Stderrf("index updated (%gs, %d unique words, %d spots)", secs, nwords, nspots);
			}
		}
		time.Sleep(1 * 60e9);	// try once a minute
	}
}
|