2009-06-16 10:14:06 -06:00
|
|
|
// Copyright 2009 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2009-12-15 16:33:31 -07:00
|
|
|
"bytes"
|
|
|
|
"flag"
|
|
|
|
"fmt"
|
|
|
|
"go/ast"
|
|
|
|
"go/doc"
|
|
|
|
"go/parser"
|
|
|
|
"go/printer"
|
|
|
|
"go/token"
|
|
|
|
"http"
|
|
|
|
"io"
|
|
|
|
"io/ioutil"
|
|
|
|
"log"
|
|
|
|
"os"
|
|
|
|
pathutil "path"
|
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
"template"
|
|
|
|
"time"
|
|
|
|
"unicode"
|
|
|
|
"utf8"
|
2009-06-16 10:14:06 -06:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Support types
|
2009-08-28 12:28:05 -06:00
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// An RWValue wraps a value and permits mutually exclusive
// access to it and records the time the value was last set.
type RWValue struct {
	mutex     sync.RWMutex
	value     interface{} // the protected value; nil until first set()
	timestamp int64       // time of last set(), in seconds since epoch
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
func (v *RWValue) set(value interface{}) {
|
2009-12-15 16:33:31 -07:00
|
|
|
v.mutex.Lock()
|
|
|
|
v.value = value
|
|
|
|
v.timestamp = time.Seconds()
|
|
|
|
v.mutex.Unlock()
|
2009-08-28 12:28:05 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
func (v *RWValue) get() (interface{}, int64) {
|
2009-12-15 16:33:31 -07:00
|
|
|
v.mutex.RLock()
|
|
|
|
defer v.mutex.RUnlock()
|
|
|
|
return v.value, v.timestamp
|
2009-08-28 12:28:05 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Globals
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// A delayTime is an RWValue holding an int delay (presumably in some
// time unit chosen by the caller — see backoff); it adds exponential
// backoff behavior on top of the mutually exclusive access.
type delayTime struct {
	RWValue
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
func (dt *delayTime) backoff(max int) {
|
2009-12-15 16:33:31 -07:00
|
|
|
dt.mutex.Lock()
|
|
|
|
v := dt.value.(int) * 2
|
2009-10-22 10:41:38 -06:00
|
|
|
if v > max {
|
2009-11-09 13:07:39 -07:00
|
|
|
v = max
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
dt.value = v
|
|
|
|
dt.mutex.Unlock()
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
var (
	verbose = flag.Bool("v", false, "verbose mode")

	// "fixed" file system roots
	goroot   string // Go installation root (flag/env; see init)
	cmdroot  string // command sources, default goroot/src/cmd
	pkgroot  string // package sources, default goroot/src/pkg
	tmplroot string // godoc templates, default goroot/lib/godoc

	// additional file system roots to consider
	path = flag.String("path", "", "additional package directories (colon-separated)")

	// layout control
	tabwidth = flag.Int("tabwidth", 4, "tab width")

	// file system mapping
	fsMap  Mapping // user-defined mapping
	fsTree RWValue // *Directory tree of packages, updated with each sync

	// http handlers
	fileServer http.Handler // default file server
	cmdHandler httpHandler  // serves /cmd/ from cmdroot
	pkgHandler httpHandler  // serves /pkg/ from pkgroot
)
|
2009-11-03 20:40:26 -07:00
|
|
|
|
|
|
|
|
2010-02-16 12:54:12 -07:00
|
|
|
// init determines goroot ($GOROOT, falling back to $HOME/go) and
// registers the root-directory flags. The dependent root flags get
// their defaults from the goroot value known at init time; setting
// -goroot later does not move them.
func init() {
	goroot = os.Getenv("GOROOT")
	if goroot == "" {
		goroot = pathutil.Join(os.Getenv("HOME"), "go")
	}
	flag.StringVar(&goroot, "goroot", goroot, "Go root directory")

	// other flags/variables that depend on goroot
	flag.StringVar(&cmdroot, "cmdroot", pathutil.Join(goroot, "src/cmd"), "command source directory")
	flag.StringVar(&pkgroot, "pkgroot", pathutil.Join(goroot, "src/pkg"), "package source directory")
	flag.StringVar(&tmplroot, "tmplroot", pathutil.Join(goroot, "lib/godoc"), "template directory")
}
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
|
2010-02-16 13:49:41 -07:00
|
|
|
// initHandlers sets up the file-system mapping and the http handlers.
// It must run after flag parsing since it reads *path and the root
// directories.
func initHandlers() {
	fsMap.Init(*path)
	fileServer = http.FileServer(goroot, "")
	cmdHandler = httpHandler{"/cmd/", cmdroot, false}
	pkgHandler = httpHandler{"/pkg/", pkgroot, true}
}
|
|
|
|
|
|
|
|
|
|
|
|
// registerPublicHandlers installs godoc's public URL handlers on mux:
// command and package documentation, search, and a catch-all file server.
func registerPublicHandlers(mux *http.ServeMux) {
	mux.Handle(cmdHandler.pattern, &cmdHandler)
	mux.Handle(pkgHandler.pattern, &pkgHandler)
	mux.Handle("/search", http.HandlerFunc(search))
	mux.Handle("/", http.HandlerFunc(serveFile))
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
2009-10-28 17:19:09 -06:00
|
|
|
// Predicates and small utility functions
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
func isGoFile(dir *os.Dir) bool {
|
2009-10-08 16:14:54 -06:00
|
|
|
return dir.IsRegular() &&
|
2009-12-15 16:33:31 -07:00
|
|
|
!strings.HasPrefix(dir.Name, ".") && // ignore .files
|
2009-11-09 13:07:39 -07:00
|
|
|
pathutil.Ext(dir.Name) == ".go"
|
2009-10-27 11:34:31 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func isPkgFile(dir *os.Dir) bool {
|
|
|
|
return isGoFile(dir) &&
|
2009-12-15 16:33:31 -07:00
|
|
|
!strings.HasSuffix(dir.Name, "_test.go") // ignore test files
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func isPkgDir(dir *os.Dir) bool {
|
2009-11-09 13:07:39 -07:00
|
|
|
return dir.IsDirectory() && len(dir.Name) > 0 && dir.Name[0] != '_'
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
func pkgName(filename string) string {
|
2010-01-27 10:44:28 -07:00
|
|
|
file, err := parser.ParseFile(filename, nil, nil, parser.PackageClauseOnly)
|
2009-11-03 20:40:26 -07:00
|
|
|
if err != nil || file == nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
return ""
|
2009-11-03 20:40:26 -07:00
|
|
|
}
|
2010-01-15 14:27:45 -07:00
|
|
|
return file.Name.Name()
|
2009-11-03 20:40:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-10-28 17:19:09 -06:00
|
|
|
func htmlEscape(s string) string {
|
2009-12-15 16:33:31 -07:00
|
|
|
var buf bytes.Buffer
|
|
|
|
template.HTMLEscape(&buf, strings.Bytes(s))
|
|
|
|
return buf.String()
|
2009-10-28 17:19:09 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-07 14:17:53 -07:00
|
|
|
// firstSentence returns the initial sentence of s: the prefix up to and
// including the first period that is followed by white space and not
// immediately preceded by a single upper-case letter (so "T. Kim went
// home." is kept whole). If no such period exists, the prefix up to the
// first period is used; failing that, all of s is returned.
func firstSentence(s string) string {
	first := -1 // index just past the first period, or -1
	cut := -1   // index just past the first period followed by white space, or -1
	var last rune = 'A'
	for idx, r := range s {
		end := idx + 1
		if r == '.' {
			if first < 0 {
				first = end // remember the first period
			}
			if end < len(s) && s[end] <= ' ' {
				if cut < 0 {
					cut = end // first period followed by white space
				}
				if !unicode.IsUpper(last) {
					// not an initial like "T." - sentence ends here
					cut = end
					break
				}
			}
		}
		last = r
	}

	switch {
	case cut >= 0:
		return s[:cut]
	case first >= 0:
		// use the next best period
		return s[:first]
	}
	// no period at all, use the entire string
	return s
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
func absolutePath(path, defaultRoot string) string {
|
|
|
|
abspath := fsMap.ToAbsolute(path)
|
|
|
|
if abspath == "" {
|
|
|
|
// no user-defined mapping found; use default mapping
|
|
|
|
abspath = pathutil.Join(defaultRoot, path)
|
|
|
|
}
|
|
|
|
return abspath
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func relativePath(path string) string {
|
|
|
|
relpath := fsMap.ToRelative(path)
|
|
|
|
if relpath == "" && strings.HasPrefix(path, goroot+"/") {
|
|
|
|
// no user-defined mapping found; use default mapping
|
|
|
|
relpath = path[len(goroot)+1:]
|
|
|
|
}
|
|
|
|
// Only if path is an invalid absolute path is relpath == ""
|
|
|
|
// at this point. This should never happen since absolute paths
|
|
|
|
// are only created via godoc for files that do exist. However,
|
|
|
|
// it is ok to return ""; it will simply provide a link to the
|
|
|
|
// top of the pkg or src directories.
|
|
|
|
return relpath
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-02 10:25:39 -07:00
|
|
|
// ----------------------------------------------------------------------------
|
2009-11-02 23:44:01 -07:00
|
|
|
// Package directories
|
2009-11-02 10:25:39 -07:00
|
|
|
|
|
|
|
// A Directory is a node in a tree of package directories
// (see newDirTree / newDirectory).
type Directory struct {
	Depth int
	Path  string // includes Name
	Name  string
	Text  string // package documentation, if any
	Dirs  []*Directory // subdirectories
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// newDirTree recursively builds the *Directory tree rooted at path
// (whose base name is name), descending at most maxDepth levels.
// It returns nil if the directory contains neither package files nor
// subdirectories with package files; directories at the depth limit
// are represented by an empty placeholder node.
func newDirTree(path, name string, depth, maxDepth int) *Directory {
	if depth >= maxDepth {
		// return a dummy directory so that the parent directory
		// doesn't get discarded just because we reached the max
		// directory depth
		return &Directory{depth, path, name, "", nil}
	}

	list, _ := ioutil.ReadDir(path) // ignore errors

	// determine number of subdirectories and package files
	ndirs := 0
	nfiles := 0
	text := ""
	for _, d := range list {
		switch {
		case isPkgDir(d):
			ndirs++
		case isPkgFile(d):
			nfiles++
			if text == "" {
				// no package documentation yet; take the first found
				file, err := parser.ParseFile(pathutil.Join(path, d.Name), nil, nil,
					parser.ParseComments|parser.PackageClauseOnly)
				if err == nil &&
					// Also accept fakePkgName, so we get synopses for commands.
					// Note: This may lead to incorrect results if there is a
					// (left-over) "documentation" package somewhere in a package
					// directory of different name, but this is very unlikely and
					// against current conventions.
					(file.Name.Name() == name || file.Name.Name() == fakePkgName) &&
					file.Doc != nil {
					// found documentation; extract a synopsis
					text = firstSentence(doc.CommentText(file.Doc))
				}
			}
		}
	}

	// create subdirectory tree
	var dirs []*Directory
	if ndirs > 0 {
		dirs = make([]*Directory, ndirs)
		i := 0
		for _, d := range list {
			if isPkgDir(d) {
				dd := newDirTree(pathutil.Join(path, d.Name), d.Name, depth+1, maxDepth)
				if dd != nil { // empty subtrees are dropped
					dirs[i] = dd
					i++
				}
			}
		}
		dirs = dirs[0:i] // trim the slots left by dropped subtrees
	}

	// if there are no package files and no subdirectories
	// (with package files), ignore the directory
	if nfiles == 0 && len(dirs) == 0 {
		return nil
	}

	return &Directory{depth, path, name, text, dirs}
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// newDirectory creates a new package directory tree with at most maxDepth
|
2009-11-03 20:40:26 -07:00
|
|
|
// levels, anchored at root which is relative to goroot. The result tree
|
2009-11-02 23:44:01 -07:00
|
|
|
// only contains directories that contain package files or that contain
|
|
|
|
// subdirectories containing package files (transitively).
|
|
|
|
//
|
2009-11-08 17:47:32 -07:00
|
|
|
func newDirectory(root string, maxDepth int) *Directory {
|
2009-12-15 16:33:31 -07:00
|
|
|
d, err := os.Lstat(root)
|
2009-11-02 23:44:01 -07:00
|
|
|
if err != nil || !isPkgDir(d) {
|
2009-11-09 13:07:39 -07:00
|
|
|
return nil
|
2009-11-02 10:25:39 -07:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
return newDirTree(root, d.Name, 0, maxDepth)
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (dir *Directory) walk(c chan<- *Directory, skipRoot bool) {
|
|
|
|
if dir != nil {
|
|
|
|
if !skipRoot {
|
2009-11-09 13:07:39 -07:00
|
|
|
c <- dir
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
for _, d := range dir.Dirs {
|
2009-11-09 13:07:39 -07:00
|
|
|
d.walk(c, false)
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (dir *Directory) iter(skipRoot bool) <-chan *Directory {
|
2009-12-15 16:33:31 -07:00
|
|
|
c := make(chan *Directory)
|
2009-11-08 17:47:32 -07:00
|
|
|
go func() {
|
2009-12-15 16:33:31 -07:00
|
|
|
dir.walk(c, skipRoot)
|
|
|
|
close(c)
|
|
|
|
}()
|
|
|
|
return c
|
2009-11-02 23:44:01 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
func (dir *Directory) lookupLocal(name string) *Directory {
|
|
|
|
for _, d := range dir.Dirs {
|
|
|
|
if d.Name == name {
|
|
|
|
return d
|
|
|
|
}
|
2009-11-02 10:25:39 -07:00
|
|
|
}
|
2010-02-16 12:20:55 -07:00
|
|
|
return nil
|
|
|
|
}
|
2009-11-02 23:44:01 -07:00
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
|
|
|
|
// lookup looks for the *Directory for a given path, relative to dir.
func (dir *Directory) lookup(path string) *Directory {
	d := strings.Split(dir.Path, "/", 0) // components of dir's own path
	p := strings.Split(path, "/", 0)     // components of the path to find
	i := 0
	// path must start with dir's own path components
	for i < len(d) {
		if i >= len(p) || d[i] != p[i] {
			return nil
		}
		i++
	}
	// descend one level per remaining component
	for dir != nil && i < len(p) {
		dir = dir.lookupLocal(p[i])
		i++
	}
	return dir
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// DirEntry describes a directory entry. The Depth and Height values
// are useful for presenting an entry in an indented fashion.
//
type DirEntry struct {
	Depth    int    // >= 0
	Height   int    // = DirList.MaxHeight - Depth, > 0
	Path     string // includes Name, relative to DirList root
	Name     string
	Synopsis string // package documentation synopsis, if any
}
|
|
|
|
|
|
|
|
|
|
|
|
// A DirList is a flattened directory tree, produced by
// (*Directory).listing, suitable for template rendering.
type DirList struct {
	MaxHeight int // directory tree height, > 0
	List      []DirEntry
}
|
|
|
|
|
|
|
|
|
|
|
|
// listing creates a (linear) directory listing from a directory tree.
// If skipRoot is set, the root directory itself is excluded from the list.
//
func (root *Directory) listing(skipRoot bool) *DirList {
	if root == nil {
		return nil
	}

	// determine number of entries n and maximum height
	n := 0
	minDepth := 1 << 30 // infinity
	maxDepth := 0
	for d := range root.iter(skipRoot) {
		n++
		if minDepth > d.Depth {
			minDepth = d.Depth
		}
		if maxDepth < d.Depth {
			maxDepth = d.Depth
		}
	}
	maxHeight := maxDepth - minDepth + 1

	if n == 0 {
		return nil
	}

	// create list
	list := make([]DirEntry, n)
	i := 0
	for d := range root.iter(skipRoot) {
		p := &list[i]
		p.Depth = d.Depth - minDepth
		p.Height = maxHeight - p.Depth
		// the path is relative to root.Path - remove the root.Path
		// prefix (the prefix should always be present but avoid
		// crashes and check)
		path := d.Path
		if strings.HasPrefix(d.Path, root.Path) {
			path = d.Path[len(root.Path):]
		}
		// remove leading '/' if any - path must be relative
		if len(path) > 0 && path[0] == '/' {
			path = path[1:]
		}
		p.Path = path
		p.Name = d.Name
		p.Synopsis = d.Text
		i++
	}

	return &DirList{maxHeight, list}
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// HTML formatting support
|
|
|
|
|
|
|
|
// Styler implements a printer.Styler.
type Styler struct {
	linetags  bool   // if set, LineTag emits an <a id="L%d"> anchor per line
	highlight string // identifier to wrap in a highlight span; "" for none
}
|
|
|
|
|
|
|
|
|
2009-11-03 11:00:57 -07:00
|
|
|
// Use the defaultStyler when there is no specific styler.
// The defaultStyler does not emit line tags since they may
// interfere with tags emitted by templates.
// TODO(gri): Should emit line tags at the beginning of a line;
// never in the middle of code.
var defaultStyler Styler // zero value: no line tags, no highlight
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
func (s *Styler) LineTag(line int) (text []byte, tag printer.HTMLTag) {
|
2009-11-16 15:26:29 -07:00
|
|
|
if s.linetags {
|
|
|
|
tag = printer.HTMLTag{fmt.Sprintf(`<a id="L%d">`, line), "</a>"}
|
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
return
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
func (s *Styler) Comment(c *ast.Comment, line []byte) (text []byte, tag printer.HTMLTag) {
|
2009-12-15 16:33:31 -07:00
|
|
|
text = line
|
2009-10-22 10:41:38 -06:00
|
|
|
// minimal syntax-coloring of comments for now - people will want more
|
|
|
|
// (don't do anything more until there's a button to turn it on/off)
|
2009-12-15 16:33:31 -07:00
|
|
|
tag = printer.HTMLTag{`<span class="comment">`, "</span>"}
|
|
|
|
return
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
func (s *Styler) BasicLit(x *ast.BasicLit) (text []byte, tag printer.HTMLTag) {
|
2009-12-15 16:33:31 -07:00
|
|
|
text = x.Value
|
|
|
|
return
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
func (s *Styler) Ident(id *ast.Ident) (text []byte, tag printer.HTMLTag) {
|
2010-01-15 14:27:45 -07:00
|
|
|
text = strings.Bytes(id.Name())
|
|
|
|
if s.highlight == id.Name() {
|
2009-11-09 13:07:39 -07:00
|
|
|
tag = printer.HTMLTag{"<span class=highlight>", "</span>"}
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
return
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 22:46:20 -07:00
|
|
|
func (s *Styler) Token(tok token.Token) (text []byte, tag printer.HTMLTag) {
|
2009-12-15 16:33:31 -07:00
|
|
|
text = strings.Bytes(tok.String())
|
|
|
|
return
|
2009-10-22 10:41:38 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-12-17 15:10:49 -07:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Tab conversion
|
|
|
|
|
|
|
|
// spaces is a reusable chunk of blanks for writeIndent; it must hold
// 16 spaces (the SOURCE shows a single space, which looks like
// whitespace lost in extraction — the comment and the chunked-write
// loop in writeIndent both assume a multi-space block).
var spaces = strings.Bytes("                ") // 16 spaces seems like a good number
|
|
|
|
|
|
|
|
// States of the tconv filter: reading leading whitespace (indenting)
// or copying the remainder of a line (collecting).
const (
	indenting = iota
	collecting
)

// A tconv is an io.Writer filter for converting leading tabs into spaces.
type tconv struct {
	output io.Writer
	state  int // indenting or collecting
	indent int // valid if state == indenting; pending indent width in spaces
}
|
|
|
|
|
|
|
|
|
2009-12-28 18:24:53 -07:00
|
|
|
// writeIndent emits p.indent spaces to the underlying writer,
// in chunks of len(spaces) at a time.
func (p *tconv) writeIndent() (err os.Error) {
	i := p.indent
	for i > len(spaces) {
		i -= len(spaces)
		if _, err = p.output.Write(spaces); err != nil {
			return
		}
	}
	// remainder (0 <= i <= len(spaces))
	_, err = p.output.Write(spaces[0:i])
	return
}
|
|
|
|
|
|
|
|
|
|
|
|
// Write implements io.Writer, replacing leading tabs ('\t', '\v') on
// each line with *tabwidth spaces each before forwarding the text to
// p.output. Text after the indentation is copied through unchanged.
func (p *tconv) Write(data []byte) (n int, err os.Error) {
	pos := 0 // valid if p.state == collecting; start of the unflushed run
	var b byte
	for n, b = range data {
		switch p.state {
		case indenting:
			switch b {
			case '\t', '\v':
				// a tab counts as a full tab stop of spaces
				p.indent += *tabwidth
			case '\n':
				// blank line: drop accumulated indent, keep the newline
				p.indent = 0
				if _, err = p.output.Write(data[n : n+1]); err != nil {
					return
				}
			case ' ':
				p.indent++
			default:
				// first non-whitespace byte: flush the indent as spaces
				// and start collecting the rest of the line
				p.state = collecting
				pos = n
				if err = p.writeIndent(); err != nil {
					return
				}
			}
		case collecting:
			if b == '\n' {
				// end of line: flush the collected run (incl. newline)
				p.state = indenting
				p.indent = 0
				if _, err = p.output.Write(data[pos : n+1]); err != nil {
					return
				}
			}
		}
	}
	n = len(data)
	if p.state == collecting {
		// flush any trailing partial line
		_, err = p.output.Write(data[pos:])
	}
	return
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Templates
|
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write an AST-node to w; optionally html-escaped.
func writeNode(w io.Writer, node interface{}, html bool, styler printer.Styler) {
	mode := printer.TabIndent | printer.UseSpaces
	if html {
		mode |= printer.GenHTML
	}
	// convert leading tabs into spaces using a tconv filter
	// to ensure a good outcome in most browsers (there may still
	// be tabs in comments and strings, but converting those into
	// the right number of spaces is much harder)
	(&printer.Config{mode, *tabwidth, styler}).Fprint(&tconv{output: w}, node)
}
|
|
|
|
|
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write text to w; optionally html-escaped.
|
|
|
|
func writeText(w io.Writer, text []byte, html bool) {
|
|
|
|
if html {
|
2009-12-15 16:33:31 -07:00
|
|
|
template.HTMLEscape(w, text)
|
|
|
|
return
|
2009-07-31 19:04:53 -06:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
w.Write(text)
|
2009-07-31 19:04:53 -06:00
|
|
|
}
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-07-31 19:04:53 -06:00
|
|
|
// Write anything to w; optionally html-escaped.
// Byte slices and strings are written as text; AST nodes are
// pretty-printed; everything else goes through fmt.
func writeAny(w io.Writer, x interface{}, html bool) {
	switch v := x.(type) {
	case []byte:
		writeText(w, v, html)
	case string:
		writeText(w, strings.Bytes(v), html)
	case ast.Decl, ast.Expr, ast.Stmt, *ast.File:
		writeNode(w, x, html, &defaultStyler)
	default:
		if html {
			// format into a buffer first so the result can be escaped
			var buf bytes.Buffer
			fmt.Fprint(&buf, x)
			writeText(w, buf.Bytes(), true)
		} else {
			fmt.Fprint(w, x)
		}
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "html" format.
// Writes x html-escaped (single statement; nothing to restructure).
func htmlFmt(w io.Writer, x interface{}, format string) {
	writeAny(w, x, true)
}
|
|
|
|
|
|
|
|
|
2010-01-13 16:18:56 -07:00
|
|
|
// Template formatter for "html-esc" format.
|
|
|
|
func htmlEscFmt(w io.Writer, x interface{}, format string) {
|
|
|
|
var buf bytes.Buffer
|
|
|
|
writeAny(&buf, x, false)
|
|
|
|
template.HTMLEscape(w, buf.Bytes())
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// Template formatter for "html-comment" format.
|
|
|
|
func htmlCommentFmt(w io.Writer, x interface{}, format string) {
|
2009-12-15 16:33:31 -07:00
|
|
|
var buf bytes.Buffer
|
|
|
|
writeAny(&buf, x, false)
|
|
|
|
doc.ToHTML(w, buf.Bytes()) // does html-escaping
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "" (default) format.
// Writes x as plain, unescaped text (single statement; nothing to restructure).
func textFmt(w io.Writer, x interface{}, format string) {
	writeAny(w, x, false)
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// Template formatter for the various "url-xxx" formats.
// x is either a path string or a value with position information;
// the path is mapped to a relative path and emitted as a URL in the
// style selected by format.
func urlFmt(w io.Writer, x interface{}, format string) {
	var path string
	var line int

	// determine path and position info, if any
	type positioner interface {
		Pos() token.Position
	}
	switch t := x.(type) {
	case string:
		path = t
	case positioner:
		pos := t.Pos()
		if pos.IsValid() {
			path = pos.Filename
			line = pos.Line
		}
	}

	// map path
	relpath := relativePath(path)

	// convert to URL
	switch format {
	default:
		// we should never reach here, but be resilient
		// and assume the url-pkg format instead
		log.Stderrf("INTERNAL ERROR: urlFmt(%s)", format)
		fallthrough
	case "url-pkg":
		// because of the irregular mapping under goroot
		// we need to correct certain relative paths
		if strings.HasPrefix(relpath, "src/pkg/") {
			relpath = relpath[len("src/pkg/"):]
		}
		template.HTMLEscape(w, strings.Bytes(pkgHandler.pattern+relpath))
	case "url-src":
		template.HTMLEscape(w, strings.Bytes("/"+relpath))
	case "url-pos":
		// line id's in html-printed source are of the
		// form "L%d" where %d stands for the line number
		template.HTMLEscape(w, strings.Bytes("/"+relpath))
		fmt.Fprintf(w, "#L%d", line)
	}
}
|
|
|
|
|
|
|
|
|
2009-11-08 21:17:04 -07:00
|
|
|
// infoKinds maps each SpotKind to its display label.
// The strings in infoKinds must be properly html-escaped.
var infoKinds = [nKinds]string{
	PackageClause: "package clause",
	ImportDecl:    "import decl",
	ConstDecl:     "const decl",
	TypeDecl:      "type decl",
	VarDecl:       "var decl",
	FuncDecl:      "func decl",
	MethodDecl:    "method decl",
	Use:           "use",
}
|
|
|
|
|
|
|
|
|
2009-11-08 21:17:04 -07:00
|
|
|
// Template formatter for "infoKind" format.
|
|
|
|
func infoKindFmt(w io.Writer, x interface{}, format string) {
|
2009-12-15 16:33:31 -07:00
|
|
|
fmt.Fprintf(w, infoKinds[x.(SpotKind)]) // infoKind entries are html-escaped
|
2009-10-27 11:34:31 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "infoLine" format.
// Emits the source line number for a SpotInfo; when the info holds a
// snippet index instead of a line, the line is looked up in the
// search index's snippet table.
func infoLineFmt(w io.Writer, x interface{}, format string) {
	info := x.(SpotInfo)
	line := info.Lori() // line number or snippet index
	if info.IsIndex() {
		index, _ := searchIndex.get()
		line = index.(*Index).Snippet(line).Line
	}
	fmt.Fprintf(w, "%d", line)
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "infoSnippet" format.
// Emits the snippet text for a SpotInfo, or an alert message when the
// info carries no snippet index.
func infoSnippetFmt(w io.Writer, x interface{}, format string) {
	info := x.(SpotInfo)
	text := `<span class="alert">no snippet text available</span>`
	if info.IsIndex() {
		index, _ := searchIndex.get()
		// no escaping of snippet text needed;
		// snippet text is escaped when generated
		text = index.(*Index).Snippet(info.Lori()).Text
	}
	fmt.Fprint(w, text)
}
|
|
|
|
|
|
|
|
|
2009-11-08 17:47:32 -07:00
|
|
|
// Template formatter for "padding" format.
|
|
|
|
func paddingFmt(w io.Writer, x interface{}, format string) {
|
|
|
|
for i := x.(int); i > 0; i-- {
|
2009-11-09 13:07:39 -07:00
|
|
|
fmt.Fprint(w, `<td width="25"></td>`)
|
2009-11-08 17:47:32 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Template formatter for "time" format.
// x is a file modification time in nanoseconds; it is converted to
// seconds and rendered as escaped local time.
func timeFmt(w io.Writer, x interface{}, format string) {
	// note: os.Dir.Mtime_ns is in uint64 in ns!
	template.HTMLEscape(w, strings.Bytes(time.SecondsToLocalTime(int64(x.(uint64)/1e9)).String()))
}
|
|
|
|
|
|
|
|
|
2010-01-13 14:09:33 -07:00
|
|
|
// Template formatter for "dir/" format.
|
|
|
|
func dirslashFmt(w io.Writer, x interface{}, format string) {
|
|
|
|
if x.(*os.Dir).IsDirectory() {
|
|
|
|
w.Write([]byte{'/'})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// Template formatter for "localname" format.
|
|
|
|
func localnameFmt(w io.Writer, x interface{}, format string) {
|
|
|
|
_, localname := pathutil.Split(x.(string))
|
|
|
|
template.HTMLEscape(w, strings.Bytes(localname))
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-06-16 10:14:06 -06:00
|
|
|
// fmap maps the format names used in the godoc page templates to the
// formatter functions defined in this file. The three url-* names share
// a single formatter (urlFmt), which presumably dispatches on the format
// argument — confirm in urlFmt.
var fmap = template.FormatterMap{
	"": textFmt,
	"html": htmlFmt,
	"html-esc": htmlEscFmt,
	"html-comment": htmlCommentFmt,
	"url-pkg": urlFmt,
	"url-src": urlFmt,
	"url-pos": urlFmt,
	"infoKind": infoKindFmt,
	"infoLine": infoLineFmt,
	"infoSnippet": infoSnippetFmt,
	"padding": paddingFmt,
	"time": timeFmt,
	"dir/": dirslashFmt,
	"localname": localnameFmt,
}
|
|
|
|
|
|
|
|
|
|
|
|
// readTemplate reads and parses the template file with the given name
// from the template root directory (tmplroot). Any read or parse error
// terminates the process via log.Exitf — templates are required for
// the server to function.
func readTemplate(name string) *template.Template {
	path := pathutil.Join(tmplroot, name)
	data, err := ioutil.ReadFile(path)
	if err != nil {
		log.Exitf("ReadFile %s: %v", path, err)
	}
	t, err := template.Parse(string(data), fmap)
	if err != nil {
		log.Exitf("%s: %v", name, err)
	}
	return t
}
|
|
|
|
|
|
|
|
|
2009-10-22 10:41:38 -06:00
|
|
|
// Pre-parsed page templates; populated by readTemplates after flag
// processing (they are read from tmplroot, which depends on flags).
var (
	dirlistHTML,
	errorHTML,
	godocHTML,
	packageHTML,
	packageText,
	searchHTML *template.Template
)
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
// readTemplates loads all page templates into the package-level
// template variables above.
func readTemplates() {
	// have to delay until after flags processing, so that tmplroot is known
	dirlistHTML = readTemplate("dirlist.html")
	errorHTML = readTemplate("error.html")
	godocHTML = readTemplate("godoc.html")
	packageHTML = readTemplate("package.html")
	packageText = readTemplate("package.txt")
	searchHTML = readTemplate("search.html")
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Generic HTML wrapper
|
|
|
|
|
2009-10-27 11:34:31 -06:00
|
|
|
// servePage wraps content in the common godoc HTML frame (godocHTML)
// and writes the resulting page to c. title is the page title and
// query pre-populates the search box. The page also carries the
// timestamp of the last file system scan and the list of package roots.
func servePage(c *http.Conn, title, query string, content []byte) {
	type Data struct {
		Title string
		PkgRoots []string
		Timestamp uint64 // int64 to be compatible with os.Dir.Mtime_ns
		Query string
		Content []byte
	}

	// timestamp of the last file system tree scan, shown on the page
	_, ts := fsTree.get()
	d := Data{
		Title: title,
		PkgRoots: fsMap.PrefixList(),
		Timestamp: uint64(ts) * 1e9, // timestamp in ns
		Query: query,
		Content: content,
	}

	if err := godocHTML.Execute(&d, c); err != nil {
		log.Stderrf("godocHTML.Execute: %s", err)
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// serveText writes text to c as a plain-text (UTF-8) response.
func serveText(c *http.Conn, text []byte) {
	c.SetHeader("Content-Type", "text/plain; charset=utf-8")
	c.Write(text)
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Files
|
|
|
|
|
2009-10-01 15:08:00 -06:00
|
|
|
// HTML comment delimiters, used by commentText to extract the first
// comment of a document (which serves as the page title).
var (
	tagBegin = strings.Bytes("<!--")
	tagEnd = strings.Bytes("-->")
)
|
|
|
|
|
|
|
|
// commentText returns the text of the first HTML comment in src.
|
|
|
|
func commentText(src []byte) (text string) {
|
2009-12-15 16:33:31 -07:00
|
|
|
i := bytes.Index(src, tagBegin)
|
|
|
|
j := bytes.Index(src, tagEnd)
|
2009-10-01 15:08:00 -06:00
|
|
|
if i >= 0 && j >= i+len(tagBegin) {
|
2009-11-09 13:07:39 -07:00
|
|
|
text = string(bytes.TrimSpace(src[i+len(tagBegin) : j]))
|
2009-10-01 15:08:00 -06:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
return
|
2009-10-01 15:08:00 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
func serveError(c *http.Conn, r *http.Request, relpath string, err os.Error) {
|
|
|
|
contents := applyTemplate(errorHTML, "errorHTML", err)
|
|
|
|
servePage(c, "File "+relpath, "", contents)
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// serveHTMLDoc serves the HTML document at abspath. Documents starting
// with "<!DOCTYPE " are written verbatim; all others are wrapped in the
// standard godoc page frame, using the document's first HTML comment
// (if any) as the page title. The language spec additionally has its
// EBNF productions linkified before serving.
func serveHTMLDoc(c *http.Conn, r *http.Request, abspath, relpath string) {
	// get HTML body contents
	src, err := ioutil.ReadFile(abspath)
	if err != nil {
		log.Stderrf("ioutil.ReadFile: %s", err)
		serveError(c, r, relpath, err)
		return
	}

	// if it begins with "<!DOCTYPE " assume it is standalone
	// html that doesn't need the template wrapping.
	if bytes.HasPrefix(src, strings.Bytes("<!DOCTYPE ")) {
		c.Write(src)
		return
	}

	// if it's the language spec, add tags to EBNF productions
	if strings.HasSuffix(abspath, "go_spec.html") {
		var buf bytes.Buffer
		linkify(&buf, src)
		src = buf.Bytes()
	}

	// use the document's first HTML comment as the page title
	title := commentText(src)
	servePage(c, title, "", src)
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
func applyTemplate(t *template.Template, name string, data interface{}) []byte {
|
|
|
|
var buf bytes.Buffer
|
|
|
|
if err := t.Execute(data, &buf); err != nil {
|
|
|
|
log.Stderrf("%s.Execute: %s", name, err)
|
2009-08-03 10:53:00 -06:00
|
|
|
}
|
2010-02-16 12:20:55 -07:00
|
|
|
return buf.Bytes()
|
|
|
|
}
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
|
|
|
|
// serveGoSource serves the Go source file at abspath as a formatted,
// line-tagged HTML page. The identifier named by the "h" form value
// (if any) is highlighted by the Styler.
func serveGoSource(c *http.Conn, r *http.Request, abspath, relpath string) {
	file, err := parser.ParseFile(abspath, nil, nil, parser.ParseComments)
	if err != nil {
		log.Stderrf("parser.ParseFile: %s", err)
		serveError(c, r, relpath, err)
		return
	}

	// pretty-print the AST into a <pre> block
	var buf bytes.Buffer
	fmt.Fprintln(&buf, "<pre>")
	writeNode(&buf, file, true, &Styler{linetags: true, highlight: r.FormValue("h")})
	fmt.Fprintln(&buf, "</pre>")

	servePage(c, "Source file "+relpath, "", buf.Bytes())
}
|
|
|
|
|
|
|
|
|
2009-11-07 22:12:46 -07:00
|
|
|
func redirect(c *http.Conn, r *http.Request) (redirected bool) {
|
2009-11-08 22:46:20 -07:00
|
|
|
if canonical := pathutil.Clean(r.URL.Path) + "/"; r.URL.Path != canonical {
|
2009-12-15 16:33:31 -07:00
|
|
|
http.Redirect(c, canonical, http.StatusMovedPermanently)
|
|
|
|
redirected = true
|
2009-11-07 22:12:46 -07:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
return
|
2009-11-07 22:12:46 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-08 01:40:43 -07:00
|
|
|
// TODO(gri): Should have a mapping from extension to handler, eventually.

// textExt[x] is true if the extension x indicates a text file, and false otherwise.
// Extensions absent from the map are decided by content sniffing in isTextFile.
var textExt = map[string]bool{
	".css": false, // must be served raw
	".js": false, // must be served raw
}
|
|
|
|
|
|
|
|
|
|
|
|
func isTextFile(path string) bool {
|
|
|
|
// if the extension is known, use it for decision making
|
|
|
|
if isText, found := textExt[pathutil.Ext(path)]; found {
|
2009-11-09 13:07:39 -07:00
|
|
|
return isText
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// the extension is not known; read an initial chunk of
|
|
|
|
// file and check if it looks like correct UTF-8; if it
|
|
|
|
// does, it's probably a text file
|
2009-12-15 16:33:31 -07:00
|
|
|
f, err := os.Open(path, os.O_RDONLY, 0)
|
2009-11-08 01:40:43 -07:00
|
|
|
if err != nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
2009-12-15 16:33:31 -07:00
|
|
|
defer f.Close()
|
2009-11-08 01:40:43 -07:00
|
|
|
|
2009-12-15 16:33:31 -07:00
|
|
|
var buf [1024]byte
|
|
|
|
n, err := f.Read(&buf)
|
2009-11-08 01:40:43 -07:00
|
|
|
if err != nil {
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
2009-12-15 16:33:31 -07:00
|
|
|
s := string(buf[0:n])
|
|
|
|
n -= utf8.UTFMax // make sure there's enough bytes for a complete unicode char
|
2009-11-08 01:40:43 -07:00
|
|
|
for i, c := range s {
|
|
|
|
if i > n {
|
2009-11-09 13:07:39 -07:00
|
|
|
break
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
if c == 0xFFFD || c < ' ' && c != '\n' && c != '\t' {
|
|
|
|
// decoding error or control character - not a text file
|
2009-11-09 13:07:39 -07:00
|
|
|
return false
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// likely a text file
|
2009-12-15 16:33:31 -07:00
|
|
|
return true
|
2009-11-08 01:40:43 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// serveTextFile serves the file at abspath as HTML-escaped plain text
// wrapped in a <pre> block inside the standard godoc page frame.
func serveTextFile(c *http.Conn, r *http.Request, abspath, relpath string) {
	src, err := ioutil.ReadFile(abspath)
	if err != nil {
		log.Stderrf("ioutil.ReadFile: %s", err)
		serveError(c, r, relpath, err)
		return
	}

	var buf bytes.Buffer
	fmt.Fprintln(&buf, "<pre>")
	template.HTMLEscape(&buf, src)
	fmt.Fprintln(&buf, "</pre>")

	servePage(c, "Text file "+relpath, "", buf.Bytes())
}
|
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// serveDirectory serves a listing of the directory at abspath,
// rendered with the dirlistHTML template. It first canonicalizes the
// URL (trailing '/') via redirect.
func serveDirectory(c *http.Conn, r *http.Request, abspath, relpath string) {
	if redirect(c, r) {
		return
	}

	list, err := ioutil.ReadDir(abspath)
	if err != nil {
		log.Stderrf("ioutil.ReadDir: %s", err)
		serveError(c, r, relpath, err)
		return
	}

	// hide the (meaningless) size of directory entries in the listing
	// NOTE(review): this write is only effective if the list elements
	// are pointers — confirm ioutil.ReadDir's element type here.
	for _, d := range list {
		if d.IsDirectory() {
			d.Size = 0
		}
	}

	contents := applyTemplate(dirlistHTML, "dirlistHTML", list)
	servePage(c, "Directory "+relpath, "", contents)
}
|
|
|
|
|
|
|
|
|
2009-10-01 15:08:00 -06:00
|
|
|
// serveFile is the top-level file handler. It dispatches, in order:
// the landing page ("/"), a redirect hiding the landing page's real
// name, HTML documents (with dir/index.html canonicalization), Go
// source files, directories (preferring an index.html if present),
// other text files, and finally the raw file server for everything
// else (e.g. .css, .js, images).
func serveFile(c *http.Conn, r *http.Request) {
	relpath := r.URL.Path[1:] // serveFile URL paths start with '/'
	abspath := absolutePath(relpath, goroot)

	// pick off special cases and hand the rest to the standard file server
	switch r.URL.Path {
	case "/":
		serveHTMLDoc(c, r, pathutil.Join(goroot, "doc/root.html"), "doc/root.html")
		return

	case "/doc/root.html":
		// hide landing page from its real name
		http.Redirect(c, "/", http.StatusMovedPermanently)
		return
	}

	// dispatch by extension
	switch pathutil.Ext(abspath) {
	case ".html":
		if strings.HasSuffix(abspath, "/index.html") {
			// We'll show index.html for the directory.
			// Use the dir/ version as canonical instead of dir/index.html.
			http.Redirect(c, r.URL.Path[0:len(r.URL.Path)-len("index.html")], http.StatusMovedPermanently)
			return
		}
		serveHTMLDoc(c, r, abspath, relpath)
		return

	case ".go":
		serveGoSource(c, r, abspath, relpath)
		return
	}

	dir, err := os.Lstat(abspath)
	if err != nil {
		log.Stderr(err)
		// NOTE(review): the sibling error paths pass relpath here;
		// passing abspath exposes the server's file system layout in
		// the error page — confirm and consider relpath for consistency.
		serveError(c, r, abspath, err)
		return
	}

	if dir != nil && dir.IsDirectory() {
		if redirect(c, r) {
			return
		}
		// prefer the directory's index.html, if it exists and is text
		if index := abspath + "/index.html"; isTextFile(index) {
			serveHTMLDoc(c, r, index, relativePath(index))
			return
		}
		serveDirectory(c, r, abspath, relpath)
		return
	}

	if isTextFile(abspath) {
		serveTextFile(c, r, abspath, relpath)
		return
	}

	// not a special case: let the standard file server handle it
	fileServer.ServeHTTP(c, r)
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Packages
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// Fake package file and name for commands. Contains the command documentation.
// getPageInfo's filter only accepts this file when serving command (non-package)
// documentation.
const fakePkgFile = "doc.go"
const fakePkgName = "documentation"
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
|
2009-07-29 18:01:09 -06:00
|
|
|
// PageInfo holds the data needed to render a package or command
// documentation page; it is produced by httpHandler.getPageInfo.
type PageInfo struct {
	Dirname string // directory containing the package
	PDoc *doc.PackageDoc // nil if no package found
	Dirs *DirList // nil if no directory information found
	IsPkg bool // false if this is not documenting a real package
}
|
|
|
|
|
|
|
|
|
|
|
|
// httpHandler serves documentation for the file system subtree rooted
// at fsRoot, mapped under the URL prefix pattern.
type httpHandler struct {
	pattern string // url pattern; e.g. "/pkg/"
	fsRoot string // file system root to which the pattern is mapped
	isPkg bool // true if this handler serves real package documentation (as opposed to command documentation)
}
|
2009-06-16 10:14:06 -06:00
|
|
|
|
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// getPageInfo returns the PageInfo for a package directory path. If
|
|
|
|
// the parameter try is true, no errors are logged if getPageInfo fails.
|
|
|
|
// If there is no corresponding package in the directory, PageInfo.PDoc
|
|
|
|
// is nil. If there are no subdirectories, PageInfo.Dirs is nil.
|
2009-07-29 18:01:09 -06:00
|
|
|
//
|
2010-02-16 12:20:55 -07:00
|
|
|
func (h *httpHandler) getPageInfo(relpath string, try bool) PageInfo {
|
|
|
|
dirname := absolutePath(relpath, h.fsRoot)
|
2009-07-29 18:01:09 -06:00
|
|
|
|
2009-11-02 23:44:01 -07:00
|
|
|
// filter function to select the desired .go files
|
2009-07-29 18:01:09 -06:00
|
|
|
filter := func(d *os.Dir) bool {
|
2010-02-16 12:20:55 -07:00
|
|
|
// If we are looking at cmd documentation, only accept
|
|
|
|
// the special fakePkgFile containing the documentation.
|
|
|
|
return isPkgFile(d) && (h.isPkg || d.Name == fakePkgFile)
|
2009-12-15 16:33:31 -07:00
|
|
|
}
|
2009-07-29 18:01:09 -06:00
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
// get package ASTs
|
2010-01-04 18:26:01 -07:00
|
|
|
pkgs, err := parser.ParseDir(dirname, filter, parser.ParseComments)
|
2010-02-01 17:13:58 -07:00
|
|
|
if err != nil && !try {
|
2010-01-04 18:26:01 -07:00
|
|
|
// TODO: errors should be shown instead of an empty directory
|
|
|
|
log.Stderrf("parser.parseDir: %s", err)
|
|
|
|
}
|
2010-02-01 17:13:58 -07:00
|
|
|
if len(pkgs) != 1 && !try {
|
2010-01-04 18:26:01 -07:00
|
|
|
// TODO: should handle multiple packages
|
|
|
|
log.Stderrf("parser.parseDir: found %d packages", len(pkgs))
|
|
|
|
}
|
2010-02-16 12:20:55 -07:00
|
|
|
|
|
|
|
// Get the best matching package: either the first one, or the
|
|
|
|
// first one whose package name matches the directory name.
|
|
|
|
// The package name is the directory name within its parent
|
|
|
|
// (use dirname instead of path because dirname is clean; i.e.
|
|
|
|
// has no trailing '/').
|
|
|
|
_, pkgname := pathutil.Split(dirname)
|
2010-01-04 18:26:01 -07:00
|
|
|
var pkg *ast.Package
|
2010-02-16 12:20:55 -07:00
|
|
|
for _, p := range pkgs {
|
|
|
|
switch {
|
|
|
|
case pkg == nil:
|
|
|
|
pkg = p
|
|
|
|
case p.Name == pkgname:
|
|
|
|
pkg = p
|
|
|
|
break
|
|
|
|
}
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
2009-07-29 18:01:09 -06:00
|
|
|
// compute package documentation
|
2009-12-15 16:33:31 -07:00
|
|
|
var pdoc *doc.PackageDoc
|
2009-07-29 18:01:09 -06:00
|
|
|
if pkg != nil {
|
2009-12-15 16:33:31 -07:00
|
|
|
ast.PackageExports(pkg)
|
2010-02-16 12:20:55 -07:00
|
|
|
pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(relpath)) // no trailing '/' in importpath
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
2009-11-02 23:44:01 -07:00
|
|
|
// get directory information
|
2009-12-15 16:33:31 -07:00
|
|
|
var dir *Directory
|
2009-11-03 20:40:26 -07:00
|
|
|
if tree, _ := fsTree.get(); tree != nil {
|
2009-11-02 23:44:01 -07:00
|
|
|
// directory tree is present; lookup respective directory
|
|
|
|
// (may still fail if the file system was updated and the
|
2010-02-16 12:20:55 -07:00
|
|
|
// new directory tree has not yet been computed)
|
|
|
|
// TODO(gri) Need to build directory tree for fsMap entries
|
2009-11-09 13:07:39 -07:00
|
|
|
dir = tree.(*Directory).lookup(dirname)
|
2010-02-16 12:20:55 -07:00
|
|
|
}
|
|
|
|
if dir == nil {
|
2009-11-02 23:44:01 -07:00
|
|
|
// no directory tree present (either early after startup
|
2010-02-16 12:20:55 -07:00
|
|
|
// or command-line mode, or we don't build a tree for the
|
|
|
|
// directory; e.g. google3); compute one level for this page
|
2009-11-09 13:07:39 -07:00
|
|
|
dir = newDirectory(dirname, 1)
|
2009-11-02 23:44:01 -07:00
|
|
|
}
|
2009-11-05 23:25:46 -07:00
|
|
|
|
2010-02-16 12:20:55 -07:00
|
|
|
return PageInfo{dirname, pdoc, dir.listing(true), h.isPkg}
|
2009-06-16 10:14:06 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-11-03 20:40:26 -07:00
|
|
|
// ServeHTTP serves the documentation page for the path following
// h.pattern. With form value f=text, the plain-text version
// (packageText template) is served instead of HTML.
func (h *httpHandler) ServeHTTP(c *http.Conn, r *http.Request) {
	if redirect(c, r) {
		return
	}

	relpath := r.URL.Path[len(h.pattern):]
	info := h.getPageInfo(relpath, false)

	if r.FormValue("f") == "text" {
		contents := applyTemplate(packageText, "packageText", info)
		serveText(c, contents)
		return
	}

	// choose a title appropriate for a package, command, or directory page
	var title string
	if info.PDoc != nil {
		switch {
		case h.isPkg:
			title = "Package " + info.PDoc.PackageName
		case info.PDoc.PackageName == fakePkgName:
			// assume that the directory name is the command name
			_, pkgname := pathutil.Split(pathutil.Clean(relpath))
			title = "Command " + pkgname
		default:
			title = "Command " + info.PDoc.PackageName
		}
	} else {
		title = "Directory " + relativePath(info.Dirname)
	}

	contents := applyTemplate(packageHTML, "packageHTML", info)
	servePage(c, title, "", contents)
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
// Search
|
|
|
|
|
|
|
|
// searchIndex holds the current search index (an *Index), rebuilt by
// the indexer goroutine and read by the search and infoSnippetFmt code.
var searchIndex RWValue
|
|
|
|
|
|
|
|
// SearchResult is the data rendered by the search page template.
type SearchResult struct {
	Query string
	Hit *LookupResult // result of Index.Lookup for the query
	Alt *AltWords // alternative words from Index.Lookup, if any
	Illegal bool // set by Index.Lookup; presumably the query was not a legal identifier — confirm in Lookup
	Accurate bool // true if the index is at least as recent as the last file system scan
}
|
|
|
|
|
|
|
|
// search implements the search page: it looks up the "q" form value in
// the current search index (if one has been built yet) and renders the
// result with the searchHTML template.
func search(c *http.Conn, r *http.Request) {
	query := strings.TrimSpace(r.FormValue("q"))
	var result SearchResult

	if index, timestamp := searchIndex.get(); index != nil {
		result.Query = query
		result.Hit, result.Alt, result.Illegal = index.(*Index).Lookup(query)
		// the result is accurate only if the index is at least as
		// recent as the last file system tree scan
		_, ts := fsTree.get()
		result.Accurate = timestamp >= ts
	}

	var title string
	if result.Hit != nil {
		title = fmt.Sprintf(`Results for query %q`, query)
	} else {
		title = fmt.Sprintf(`No results found for query %q`, query)
	}

	contents := applyTemplate(searchHTML, "searchHTML", result)
	servePage(c, title, query, contents)
}
|
|
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
2010-02-16 12:20:55 -07:00
|
|
|
// Indexer
|
2009-06-16 10:14:06 -06:00
|
|
|
|
2009-10-30 11:58:53 -06:00
|
|
|
// indexer runs as a background goroutine: once a minute it compares
// the search index timestamp against the file system tree timestamp
// and rebuilds the index when the tree is newer.
func indexer() {
	for {
		_, ts := fsTree.get()
		if _, timestamp := searchIndex.get(); timestamp < ts {
			// index possibly out of date - make a new one
			// (could use a channel to send an explicit signal
			// from the sync goroutine, but this solution is
			// more decoupled, trivial, and works well enough)
			start := time.Nanoseconds()
			index := NewIndex(goroot)
			stop := time.Nanoseconds()
			searchIndex.set(index)
			if *verbose {
				secs := float64((stop-start)/1e6) / 1e3
				nwords, nspots := index.Size()
				log.Stderrf("index updated (%gs, %d unique words, %d spots)", secs, nwords, nspots)
			}
		}
		time.Sleep(1 * 60e9) // try once a minute
	}
}
|