2013-07-17 01:17:12 -06:00
|
|
|
// Copyright 2013 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
|
|
|
package godoc
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2014-03-14 16:58:22 -06:00
|
|
|
"encoding/json"
|
2018-01-19 00:13:17 -07:00
|
|
|
"errors"
|
2013-07-17 01:17:12 -06:00
|
|
|
"fmt"
|
|
|
|
"go/ast"
|
|
|
|
"go/build"
|
|
|
|
"go/doc"
|
|
|
|
"go/token"
|
|
|
|
htmlpkg "html"
|
2014-03-14 16:58:22 -06:00
|
|
|
htmltemplate "html/template"
|
2013-07-17 01:17:12 -06:00
|
|
|
"io"
|
|
|
|
"io/ioutil"
|
|
|
|
"log"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
pathpkg "path"
|
|
|
|
"path/filepath"
|
2013-08-27 17:39:02 -06:00
|
|
|
"sort"
|
2013-07-17 01:17:12 -06:00
|
|
|
"strings"
|
|
|
|
"text/template"
|
|
|
|
"time"
|
|
|
|
|
2014-11-09 14:50:40 -07:00
|
|
|
"golang.org/x/tools/godoc/analysis"
|
|
|
|
"golang.org/x/tools/godoc/util"
|
|
|
|
"golang.org/x/tools/godoc/vfs"
|
2013-07-17 01:17:12 -06:00
|
|
|
)
|
|
|
|
|
2013-07-18 18:27:53 -06:00
|
|
|
// handlerServer is a migration from an old godoc http Handler type.
// This should probably merge into something else.
//
// It serves package documentation pages for a subtree of the virtual
// file system, mapping a URL pattern to a file system root.
type handlerServer struct {
	p           *Presentation
	c           *Corpus  // copy of p.Corpus
	pattern     string   // url pattern; e.g. "/pkg/"
	stripPrefix string   // prefix to strip from import path; e.g. "pkg/"
	fsRoot      string   // file system root to which the pattern is mapped; e.g. "/src"
	exclude     []string // file system paths to exclude; e.g. "/src/cmd"
}
|
|
|
|
|
2013-07-18 18:27:53 -06:00
|
|
|
// registerWithMux registers s on mux under its configured URL pattern.
func (s *handlerServer) registerWithMux(mux *http.ServeMux) {
	mux.Handle(s.pattern, s)
}
|
|
|
|
|
|
|
|
// GetPageInfo returns the PageInfo for a package directory abspath. If
// mode has the ShowSource flag set, an AST containing only the package
// exports is computed (PageInfo.PAst); otherwise package documentation
// (PageInfo.PDoc) is extracted from the AST. If there is no
// corresponding package in the directory, PageInfo.PAst and
// PageInfo.PDoc are nil. If there are no sub-directories,
// PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is set to
// the respective error but the error is not logged.
//
func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode, goos, goarch string) *PageInfo {
	info := &PageInfo{Dirname: abspath, Mode: mode}

	// Restrict to the package files that would be used when building
	// the package on this system. This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: If goos/goarch aren't set, the current binary's GOOS/GOARCH
	// are used.
	// The build.Context's file-system hooks are redirected onto the
	// corpus's virtual file system (h.c.fs).
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.IsDir = func(path string) bool {
		fi, err := h.c.fs.Stat(filepath.ToSlash(path))
		return err == nil && fi.IsDir()
	}
	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
		f, err := h.c.fs.ReadDir(filepath.ToSlash(dir))
		// Hide "internal" entries unless NoFiltering is requested.
		filtered := make([]os.FileInfo, 0, len(f))
		for _, i := range f {
			if mode&NoFiltering != 0 || i.Name() != "internal" {
				filtered = append(filtered, i)
			}
		}
		return filtered, err
	}
	ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
		data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
		if err != nil {
			return nil, err
		}
		return ioutil.NopCloser(bytes.NewReader(data)), nil
	}

	if goos != "" {
		ctxt.GOOS = goos
	}
	if goarch != "" {
		ctxt.GOARCH = goarch
	}

	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := h.c.parseFiles(fset, relpath, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&ShowSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&NoFiltering != 0 {
				m |= doc.AllDecls
			}
			if mode&AllMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
			if mode&NoTypeAssoc != 0 {
				// Detach consts, vars, and factory functions from
				// their associated types and list them at package level.
				for _, t := range info.PDoc.Types {
					info.PDoc.Consts = append(info.PDoc.Consts, t.Consts...)
					info.PDoc.Vars = append(info.PDoc.Vars, t.Vars...)
					info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
					t.Consts = nil
					t.Vars = nil
					t.Funcs = nil
				}
				// for now we cannot easily sort consts and vars since
				// go/doc.Value doesn't export the order information
				sort.Sort(funcsByName(info.PDoc.Funcs))
			}

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = h.c.parseFiles(fset, relpath, abspath, testfiles)
			if err != nil {
				// Examples are best-effort: log and continue.
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(h.c, pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx := h.p.NotesRx; rx != nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}

		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			// or perhaps eliminating the mode altogether.
			if mode&NoFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = files
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the FSTree path
		dir = h.c.newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true, func(path string) bool { return h.includePath(path, mode) })

	info.DirTime = timestamp
	info.DirFlat = mode&FlatDir != 0

	return info
}
|
|
|
|
|
2014-10-13 10:47:02 -06:00
|
|
|
func (h *handlerServer) includePath(path string, mode PageInfoMode) (r bool) {
|
|
|
|
// if the path is under one of the exclusion paths, don't list.
|
|
|
|
for _, e := range h.exclude {
|
|
|
|
if strings.HasPrefix(path, e) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// if the path includes 'internal', don't list unless we are in the NoFiltering mode.
|
|
|
|
if mode&NoFiltering != 0 {
|
|
|
|
return true
|
|
|
|
}
|
2016-01-12 19:06:35 -07:00
|
|
|
if strings.Contains(path, "internal") || strings.Contains(path, "vendor") {
|
2014-10-27 19:26:29 -06:00
|
|
|
for _, c := range strings.Split(filepath.Clean(path), string(os.PathSeparator)) {
|
2016-01-12 19:06:35 -07:00
|
|
|
if c == "internal" || c == "vendor" {
|
2014-10-13 10:47:02 -06:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2013-08-27 17:39:02 -06:00
|
|
|
// funcsByName implements sort.Interface for a slice of *doc.Func,
// ordering functions alphabetically by name.
type funcsByName []*doc.Func

func (s funcsByName) Len() int           { return len(s) }
func (s funcsByName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
func (s funcsByName) Less(i, j int) bool { return s[i].Name < s[j].Name }
|
|
|
|
|
2013-07-18 18:27:53 -06:00
|
|
|
// ServeHTTP serves the documentation page (HTML or, with mode NoHTML,
// plain text) for the package or directory named by the request path
// under h.pattern.
func (h *handlerServer) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if redirect(w, r) {
		return
	}

	relpath := pathpkg.Clean(r.URL.Path[len(h.stripPrefix)+1:])

	// Refuse to serve until the corpus's initial scan has completed.
	if !h.corpusInitialized() {
		h.p.ServeError(w, r, relpath, errors.New("Scan is not yet complete. Please retry after a few moments"))
		return
	}

	abspath := pathpkg.Join(h.fsRoot, relpath)
	mode := h.p.GetPageInfoMode(r)
	if relpath == builtinPkgPath {
		// The builtin package page is always shown unfiltered and
		// without type association.
		mode = NoFiltering | NoTypeAssoc
	}
	info := h.GetPageInfo(abspath, relpath, mode, r.FormValue("GOOS"), r.FormValue("GOARCH"))
	if info.Err != nil {
		log.Print(info.Err)
		h.p.ServeError(w, r, relpath, info.Err)
		return
	}

	if mode&NoHTML != 0 {
		h.p.ServeText(w, applyTemplate(h.p.PackageText, "packageText", info))
		return
	}

	var tabtitle, title, subtitle string
	switch {
	case info.PAst != nil:
		// Take the package name from any one of the parsed files.
		for _, ast := range info.PAst {
			tabtitle = ast.Name.Name
			break
		}
	case info.PDoc != nil:
		tabtitle = info.PDoc.Name
	default:
		tabtitle = info.Dirname
		title = "Directory "
		if h.p.ShowTimestamps {
			subtitle = "Last update: " + info.DirTime.String()
		}
	}
	if title == "" {
		if info.IsMain {
			// assume that the directory name is the command name
			_, tabtitle = pathpkg.Split(relpath)
			title = "Command "
		} else {
			title = "Package "
		}
	}
	title += tabtitle

	// special cases for top-level package/command directories
	switch tabtitle {
	case "/src":
		title = "Packages"
		tabtitle = "Packages"
	case "/src/cmd":
		title = "Commands"
		tabtitle = "Commands"
	}

	// Emit JSON array for type information.
	pi := h.c.Analysis.PackageInfo(relpath)
	info.CallGraphIndex = pi.CallGraphIndex
	info.CallGraph = htmltemplate.JS(marshalJSON(pi.CallGraph))
	info.AnalysisData = htmltemplate.JS(marshalJSON(pi.Types))
	info.TypeInfoIndex = make(map[string]int)
	for i, ti := range pi.Types {
		info.TypeInfoIndex[ti.Name] = i
	}

	info.GoogleCN = googleCN(r)
	h.p.ServePage(w, Page{
		Title:    title,
		Tabtitle: tabtitle,
		Subtitle: subtitle,
		Body:     applyTemplate(h.p.PackageHTML, "packageHTML", info),
		GoogleCN: info.GoogleCN,
	})
}
|
|
|
|
|
2018-01-19 00:13:17 -07:00
|
|
|
// corpusInitialized reports whether the corpus's initial scan has
// completed, reading Corpus.initDone under its read lock.
func (h *handlerServer) corpusInitialized() bool {
	h.c.initMu.RLock()
	defer h.c.initMu.RUnlock()
	return h.c.initDone
}
|
|
|
|
|
2013-07-17 01:17:12 -06:00
|
|
|
// PageInfoMode is a bit set of flags that control how a package page
// is computed and rendered.
type PageInfoMode uint

const (
	PageInfoModeQueryString = "m" // query string where PageInfoMode is stored

	NoFiltering PageInfoMode = 1 << iota // do not filter exports
	AllMethods                           // show all embedded methods
	ShowSource                           // show source code, do not extract documentation
	NoHTML                               // show result in textual form, do not generate HTML
	FlatDir                              // show directory in a flat (non-indented) manner
	NoTypeAssoc                          // don't associate consts, vars, and factory functions with types
)
|
|
|
|
|
|
|
|
// modeNames defines names for each PageInfoMode flag.
// NoTypeAssoc has no entry here, so it cannot be enabled through the
// "m" query-string value (see GetPageInfoMode).
var modeNames = map[string]PageInfoMode{
	"all":     NoFiltering,
	"methods": AllMethods,
	"src":     ShowSource,
	"text":    NoHTML,
	"flat":    FlatDir,
}
|
|
|
|
|
2017-01-18 11:52:36 -07:00
|
|
|
// generate a query string for persisting PageInfoMode between pages.
|
|
|
|
func modeQueryString(mode PageInfoMode) string {
|
|
|
|
if modeNames := mode.names(); len(modeNames) > 0 {
|
|
|
|
return "?m=" + strings.Join(modeNames, ",")
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
|
|
|
|
// alphabetically sorted names of active flags for a PageInfoMode.
|
|
|
|
func (m PageInfoMode) names() []string {
|
|
|
|
var names []string
|
|
|
|
for name, mode := range modeNames {
|
|
|
|
if m&mode != 0 {
|
|
|
|
names = append(names, name)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
sort.Strings(names)
|
|
|
|
return names
|
|
|
|
}
|
|
|
|
|
2013-07-17 01:17:12 -06:00
|
|
|
// GetPageInfoMode computes the PageInfoMode flags by analyzing the request
// URL form value "m". Its value is a comma-separated list of mode names
// as defined by modeNames (e.g.: m=src,text). Unknown names are ignored.
func (p *Presentation) GetPageInfoMode(r *http.Request) PageInfoMode {
	var mode PageInfoMode
	for _, k := range strings.Split(r.FormValue(PageInfoModeQueryString), ",") {
		if m, found := modeNames[strings.TrimSpace(k)]; found {
			mode |= m
		}
	}
	// Give the presentation a chance to override the computed mode.
	if p.AdjustPageInfoMode != nil {
		mode = p.AdjustPageInfoMode(r, mode)
	}
	return mode
}
|
|
|
|
|
|
|
|
// poorMansImporter returns a (dummy) package object named
|
|
|
|
// by the last path component of the provided package path
|
|
|
|
// (as is the convention for packages). This is sufficient
|
|
|
|
// to resolve package identifiers without doing an actual
|
|
|
|
// import. It never returns an error.
|
|
|
|
//
|
|
|
|
func poorMansImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
|
|
|
|
pkg := imports[path]
|
|
|
|
if pkg == nil {
|
|
|
|
// note that strings.LastIndex returns -1 if there is no "/"
|
|
|
|
pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:])
|
|
|
|
pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
|
|
|
|
imports[path] = pkg
|
|
|
|
}
|
|
|
|
return pkg, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// globalNames returns a set of the names declared by all package-level
|
|
|
|
// declarations. Method names are returned in the form Receiver_Method.
|
|
|
|
func globalNames(pkg *ast.Package) map[string]bool {
|
|
|
|
names := make(map[string]bool)
|
|
|
|
for _, file := range pkg.Files {
|
|
|
|
for _, decl := range file.Decls {
|
|
|
|
addNames(names, decl)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return names
|
|
|
|
}
|
|
|
|
|
|
|
|
// collectExamples collects examples for pkg from testfiles.
// An example is kept if it is unnamed (package-level) or if its name,
// after stripping the example suffix, matches a package-level
// identifier in pkg; otherwise it is skipped (and logged when the
// corpus is in verbose mode).
func collectExamples(c *Corpus, pkg *ast.Package, testfiles map[string]*ast.File) []*doc.Example {
	var files []*ast.File
	for _, f := range testfiles {
		files = append(files, f)
	}

	var examples []*doc.Example
	globals := globalNames(pkg)
	for _, e := range doc.Examples(files...) {
		name := stripExampleSuffix(e.Name)
		if name == "" || globals[name] {
			examples = append(examples, e)
		} else if c.Verbose {
			log.Printf("skipping example 'Example%s' because '%s' is not a known function or type", e.Name, e.Name)
		}
	}

	return examples
}
|
|
|
|
|
|
|
|
// addNames adds the names declared by decl to the names set.
|
|
|
|
// Method names are added in the form ReceiverTypeName_Method.
|
|
|
|
func addNames(names map[string]bool, decl ast.Decl) {
|
|
|
|
switch d := decl.(type) {
|
|
|
|
case *ast.FuncDecl:
|
|
|
|
name := d.Name.Name
|
|
|
|
if d.Recv != nil {
|
|
|
|
var typeName string
|
|
|
|
switch r := d.Recv.List[0].Type.(type) {
|
|
|
|
case *ast.StarExpr:
|
|
|
|
typeName = r.X.(*ast.Ident).Name
|
|
|
|
case *ast.Ident:
|
|
|
|
typeName = r.Name
|
|
|
|
}
|
|
|
|
name = typeName + "_" + name
|
|
|
|
}
|
|
|
|
names[name] = true
|
|
|
|
case *ast.GenDecl:
|
|
|
|
for _, spec := range d.Specs {
|
|
|
|
switch s := spec.(type) {
|
|
|
|
case *ast.TypeSpec:
|
|
|
|
names[s.Name.Name] = true
|
|
|
|
case *ast.ValueSpec:
|
|
|
|
for _, id := range s.Names {
|
|
|
|
names[id.Name] = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// packageExports is a local implementation of ast.PackageExports
// which correctly updates each package file's comment list.
// (The ast.PackageExports signature is frozen, hence the local
// implementation).
//
func packageExports(fset *token.FileSet, pkg *ast.Package) {
	for _, src := range pkg.Files {
		// Build the comment map before trimming so that comments can
		// be re-associated with the surviving (exported) nodes.
		cmap := ast.NewCommentMap(fset, src, src.Comments)
		ast.FileExports(src)
		src.Comments = cmap.Filter(src).Comments()
	}
}
|
|
|
|
|
|
|
|
func applyTemplate(t *template.Template, name string, data interface{}) []byte {
|
|
|
|
var buf bytes.Buffer
|
|
|
|
if err := t.Execute(&buf, data); err != nil {
|
|
|
|
log.Printf("%s.Execute: %s", name, err)
|
|
|
|
}
|
|
|
|
return buf.Bytes()
|
|
|
|
}
|
|
|
|
|
2014-01-29 08:53:45 -07:00
|
|
|
// writerCapturesErr wraps an io.Writer and records the most recent
// error returned by the underlying writer's Write.
type writerCapturesErr struct {
	w   io.Writer
	err error
}

// Write forwards p to the wrapped writer. Any error is remembered in
// w.err before being returned to the caller.
func (w *writerCapturesErr) Write(p []byte) (int, error) {
	n, err := w.w.Write(p)
	if err != nil {
		w.err = err
	}
	return n, err
}
|
|
|
|
|
|
|
|
// applyTemplateToResponseWriter uses an http.ResponseWriter as the io.Writer
// for the call to template.Execute. It uses an io.Writer wrapper to capture
// errors from the underlying http.ResponseWriter. Errors are logged only when
// they come from the template processing and not the Writer; this avoid
// polluting log files with error messages due to networking issues, such as
// client disconnects and http HEAD protocol violations.
func applyTemplateToResponseWriter(rw http.ResponseWriter, t *template.Template, data interface{}) {
	w := &writerCapturesErr{w: rw}
	err := t.Execute(w, data)
	// There are some cases where template.Execute does not return an error when
	// rw returns an error, and some where it does. So check w.err first.
	if w.err == nil && err != nil {
		// Log template errors.
		log.Printf("%s.Execute: %s", t.Name(), err)
	}
}
|
|
|
|
|
2013-07-17 01:17:12 -06:00
|
|
|
func redirect(w http.ResponseWriter, r *http.Request) (redirected bool) {
|
|
|
|
canonical := pathpkg.Clean(r.URL.Path)
|
|
|
|
if !strings.HasSuffix(canonical, "/") {
|
|
|
|
canonical += "/"
|
|
|
|
}
|
|
|
|
if r.URL.Path != canonical {
|
|
|
|
url := *r.URL
|
|
|
|
url.Path = canonical
|
|
|
|
http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
|
|
|
|
redirected = true
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
func redirectFile(w http.ResponseWriter, r *http.Request) (redirected bool) {
|
|
|
|
c := pathpkg.Clean(r.URL.Path)
|
|
|
|
c = strings.TrimRight(c, "/")
|
|
|
|
if r.URL.Path != c {
|
|
|
|
url := *r.URL
|
|
|
|
url.Path = c
|
|
|
|
http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
|
|
|
|
redirected = true
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2013-07-17 17:52:45 -06:00
|
|
|
// serveTextFile serves the file at abspath either as plain text (when
// the "m" form value is "text") or as an HTML page with the contents
// in a <pre> block. Go source files additionally get analysis markup
// links; form values "h" and "s" select highlighting and a range
// selection respectively.
func (p *Presentation) serveTextFile(w http.ResponseWriter, r *http.Request, abspath, relpath, title string) {
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	if r.FormValue(PageInfoModeQueryString) == "text" {
		p.ServeText(w, src)
		return
	}

	h := r.FormValue("h")
	s := RangeSelection(r.FormValue("s"))

	var buf bytes.Buffer
	if pathpkg.Ext(abspath) == ".go" {
		// Find markup links for this file (e.g. "/src/fmt/print.go").
		fi := p.Corpus.Analysis.FileInfo(abspath)
		buf.WriteString("<script type='text/javascript'>document.ANALYSIS_DATA = ")
		buf.Write(marshalJSON(fi.Data))
		buf.WriteString(";</script>\n")

		if status := p.Corpus.Analysis.Status(); status != "" {
			buf.WriteString("<a href='/lib/godoc/analysis/help.html'>Static analysis features</a> ")
			// TODO(adonovan): show analysis status at per-file granularity.
			fmt.Fprintf(&buf, "<span style='color: grey'>[%s]</span><br/>", htmlpkg.EscapeString(status))
		}

		buf.WriteString("<pre>")
		formatGoSource(&buf, src, fi.Links, h, s)
		buf.WriteString("</pre>")
	} else {
		buf.WriteString("<pre>")
		FormatText(&buf, src, 1, false, h, s)
		buf.WriteString("</pre>")
	}
	fmt.Fprintf(&buf, `<p><a href="/%s?m=text">View as plain text</a></p>`, htmlpkg.EscapeString(relpath))

	p.ServePage(w, Page{
		Title:    title,
		SrcPath:  relpath,
		Tabtitle: relpath,
		Body:     buf.Bytes(),
		GoogleCN: googleCN(r),
	})
}
|
|
|
|
|
2014-03-14 16:58:22 -06:00
|
|
|
// formatGoSource HTML-escapes Go source text and writes it to w,
// decorating it with the specified analysis links.
//
func formatGoSource(buf *bytes.Buffer, text []byte, links []analysis.Link, pattern string, selection Selection) {
	// Emit to a temp buffer so that we can add line anchors at the end.
	saved, buf := buf, new(bytes.Buffer)

	var i int
	var link analysis.Link // shared state of the two funcs below
	segmentIter := func() (seg Segment) {
		if i < len(links) {
			link = links[i]
			i++
			seg = Segment{link.Start(), link.End()}
		}
		return
	}
	linkWriter := func(w io.Writer, offs int, start bool) {
		link.Write(w, offs, start)
	}

	comments := tokenSelection(text, token.COMMENT)
	var highlights Selection
	if pattern != "" {
		highlights = regexpSelection(text, pattern)
	}

	FormatSelections(buf, text, linkWriter, segmentIter, selectionTag, comments, highlights, selection)

	// Now copy buf to saved, adding line anchors.

	// The lineSelection mechanism can't be composed with our
	// linkWriter, so we have to add line spans as another pass.
	n := 1
	for _, line := range bytes.Split(buf.Bytes(), []byte("\n")) {
		// The line numbers are inserted into the document via a CSS ::before
		// pseudo-element. This prevents them from being copied when users
		// highlight and copy text.
		// ::before is supported in 98% of browsers: https://caniuse.com/#feat=css-gencontent
		// This is also the trick Github uses to hide line numbers.
		//
		// The first tab for the code snippet needs to start in column 9, so
		// it indents a full 8 spaces, hence the two nbsp's. Otherwise the tab
		// character only indents about two spaces.
		fmt.Fprintf(saved, `<span id="L%d" class="ln" data-content="%6d">  </span>`, n, n)
		n++
		saved.Write(line)
		saved.WriteByte('\n')
	}
}
|
|
|
|
|
2013-07-17 17:52:45 -06:00
|
|
|
// serveDirectory renders a directory-listing page for abspath, or an
// error page if the directory cannot be read.
func (p *Presentation) serveDirectory(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
	if redirect(w, r) {
		return
	}

	list, err := p.Corpus.fs.ReadDir(abspath)
	if err != nil {
		p.ServeError(w, r, relpath, err)
		return
	}

	p.ServePage(w, Page{
		Title:    "Directory",
		SrcPath:  relpath,
		Tabtitle: relpath,
		Body:     applyTemplate(p.DirlistHTML, "dirlistHTML", list),
		GoogleCN: googleCN(r),
	})
}
|
|
|
|
|
2013-07-17 17:52:45 -06:00
|
|
|
// ServeHTMLDoc serves the HTML document at abspath. Standalone
// documents (beginning with a doctype) are written as-is; otherwise an
// optional leading JSON metadata blob is extracted, the document may
// be evaluated as a template, the language spec gets its EBNF
// productions linkified, and the result is wrapped in the site page
// template.
func (p *Presentation) ServeHTMLDoc(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
	// get HTML body contents
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	// if it begins with "<!DOCTYPE " assume it is standalone
	// html that doesn't need the template wrapping.
	if bytes.HasPrefix(src, doctype) {
		w.Write(src)
		return
	}

	// if it begins with a JSON blob, read in the metadata.
	meta, src, err := extractMetadata(src)
	if err != nil {
		log.Printf("decoding metadata %s: %v", relpath, err)
	}

	page := Page{
		Title:    meta.Title,
		Subtitle: meta.Subtitle,
		GoogleCN: googleCN(r),
	}

	// evaluate as template if indicated
	if meta.Template {
		tmpl, err := template.New("main").Funcs(p.TemplateFuncs()).Parse(string(src))
		if err != nil {
			log.Printf("parsing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		var buf bytes.Buffer
		if err := tmpl.Execute(&buf, page); err != nil {
			log.Printf("executing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		src = buf.Bytes()
	}

	// if it's the language spec, add tags to EBNF productions
	if strings.HasSuffix(abspath, "go_spec.html") {
		var buf bytes.Buffer
		Linkify(&buf, src)
		src = buf.Bytes()
	}

	page.Body = src
	p.ServePage(w, page)
}
|
|
|
|
|
2013-07-17 21:14:09 -06:00
|
|
|
// ServeFile is the exported entry point for serving an arbitrary file
// or directory from the corpus; it delegates to serveFile.
func (p *Presentation) ServeFile(w http.ResponseWriter, r *http.Request) {
	p.serveFile(w, r)
}
|
|
|
|
|
2013-07-17 17:52:45 -06:00
|
|
|
// serveFile dispatches the request URL path to the appropriate renderer:
// canonical-path redirects driven by corpus metadata, HTML documents,
// Go source files, directory listings, other text files, and finally the
// static file server as a fallback for everything else (images, binaries).
func (p *Presentation) serveFile(w http.ResponseWriter, r *http.Request) {
	relpath := r.URL.Path

	// Check to see if we need to redirect or serve another file.
	if m := p.Corpus.MetadataFor(relpath); m != nil {
		if m.Path != relpath {
			// Redirect to canonical path.
			http.Redirect(w, r, m.Path, http.StatusMovedPermanently)
			return
		}
		// Serve from the actual filesystem path.
		relpath = m.filePath
	}

	abspath := relpath
	relpath = relpath[1:] // strip leading slash

	switch pathpkg.Ext(relpath) {
	case ".html":
		if strings.HasSuffix(relpath, "/index.html") {
			// We'll show index.html for the directory.
			// Use the dir/ version as canonical instead of dir/index.html.
			http.Redirect(w, r, r.URL.Path[0:len(r.URL.Path)-len("index.html")], http.StatusMovedPermanently)
			return
		}
		p.ServeHTMLDoc(w, r, abspath, relpath)
		return

	case ".go":
		p.serveTextFile(w, r, abspath, relpath, "Source file")
		return
	}

	dir, err := p.Corpus.fs.Lstat(abspath)
	if err != nil {
		log.Print(err)
		p.ServeError(w, r, relpath, err)
		return
	}

	if dir != nil && dir.IsDir() {
		// Canonicalize the directory URL before deciding what to render.
		if redirect(w, r) {
			return
		}
		// A directory's index.html, when present, takes precedence over
		// the generated listing.
		if index := pathpkg.Join(abspath, "index.html"); util.IsTextFile(p.Corpus.fs, index) {
			p.ServeHTMLDoc(w, r, index, index)
			return
		}
		p.serveDirectory(w, r, abspath, relpath)
		return
	}

	if util.IsTextFile(p.Corpus.fs, abspath) {
		if redirectFile(w, r) {
			return
		}
		p.serveTextFile(w, r, abspath, relpath, "Text file")
		return
	}

	// Not HTML, Go, a directory, or text: hand off to the static file server.
	p.fileServer.ServeHTTP(w, r)
}
|
|
|
|
|
2013-07-17 17:52:45 -06:00
|
|
|
// ServeText writes text to w as a UTF-8 plain-text response.
// Note: any error from Write is intentionally ignored; there is no
// useful recovery once the response has started.
func (p *Presentation) ServeText(w http.ResponseWriter, text []byte) {
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	w.Write(text)
}
|
2014-03-14 16:58:22 -06:00
|
|
|
|
|
|
|
// marshalJSON returns the JSON encoding of x, panicking if x cannot be
// marshaled. It is intended for values that are known to be encodable;
// a failure indicates a programmer error.
func marshalJSON(x interface{}) []byte {
	// Flip indentJSON to true to emit indented output, which is easier
	// to read when debugging.
	const indentJSON = false

	marshal := json.Marshal
	if indentJSON {
		marshal = func(v interface{}) ([]byte, error) {
			return json.MarshalIndent(v, "", " ")
		}
	}

	data, err := marshal(x)
	if err != nil {
		panic(fmt.Sprintf("json.Marshal failed: %s", err))
	}
	return data
}
|