# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# This is the server part of the continuous build system for Go. It must be run
# by AppEngine.

from django.utils import simplejson
from google.appengine.api import mail
from google.appengine.api import memcache
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app

import datetime
import hashlib
import logging
import os
import re
import bz2

# local imports
from auth import auth
import const

# The majority of our state is Commit objects. One of these exists for each of
# the commits known to the build system. Their key names are of the form
# <commit number (%08x)> "-" <hg hash>. This means that a sort by key name
# is sufficient to order the commits.
#
# The commit numbers are purely local. They need not match up to the commit
# numbers in an hg repo. When inserting a new commit, the parent commit must be
# given, and this is used to generate the new commit number. In order to create
# the first Commit object, a special command (/init) is used.
#
# N.B. user is a StringProperty, so it must be type 'unicode'.
# desc is a BlobProperty, so it must be type 'string'.
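#
# For illustration (hash value reused from RootCommitKeyName below): the
# zero-padded '%08x' prefix is what makes lexicographic key order match
# numeric commit order.
#
#   '%08x-%s' % (18, 'f32c6f1038207c55d5780231f7484f311020747e')
#   # -> '00000012-f32c6f1038207c55d5780231f7484f311020747e'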
class Commit(db.Model):
    num = db.IntegerProperty()  # internal, monotonic counter.
    node = db.StringProperty()  # Hg hash
    parentnode = db.StringProperty()  # Hg hash
    user = db.StringProperty()
    date = db.DateTimeProperty()
    desc = db.BlobProperty()

    # This is the list of builds. Each element is a string of the form
    # <builder name> '`' <log hash>. If the log hash is empty, then the
    # build was successful.
    builds = db.StringListProperty()
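    # For example (assumed values):
    #   'linux-amd64`'         - successful build (empty log hash)
    #   'linux-amd64`3fd9...'  - failed build; the hash is the key name of
    #                            a CompressedLog entity holding the log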

    fail_notification_sent = db.BooleanProperty()


# A CompressedLog contains the textual build log of a failed build.
# The key name is the hex digest of the SHA256 hash of the contents.
# The contents are bz2-compressed.
class CompressedLog(db.Model):
    log = db.BlobProperty()
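
# A minimal sketch, with hypothetical helper names, of the round trip that
# the Build and LogHandler handlers below implement:
#
#   def storeLog(text):
#       key = hashlib.sha256(text).hexdigest()
#       CompressedLog(key_name=key, log=bz2.compress(text)).put()
#       return key
#
#   def fetchLog(key):
#       l = CompressedLog.get_by_key_name(key)
#       if l is None:
#           return None
#       return bz2.decompress(l.log)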


N = 30  # default number of recent commits to display or consult


def builderInfo(b):
    # A builder name is <goos>-<goarch>, with an optional -<note> suffix.
    f = b.split('-', 3)
    goos = f[0]
    goarch = f[1]
    note = ""
    if len(f) > 2:
        note = f[2]
    return {'name': b, 'goos': goos, 'goarch': goarch, 'note': note}
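
# For example (an assumed builder name):
#   builderInfo('linux-386-quiet')
#   # -> {'name': 'linux-386-quiet', 'goos': 'linux', 'goarch': '386',
#   #     'note': 'quiet'}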


def builderset():
    # Collect the set of builder names seen in the most recent N commits.
    q = Commit.all()
    q.order('-__key__')
    results = q.fetch(N)
    builders = set()
    for c in results:
        builders.update(set(parseBuild(build)['builder'] for build in c.builds))
    return builders


class MainPage(webapp.RequestHandler):
    def get(self):
        self.response.headers['Content-Type'] = 'text/html; charset=utf-8'

        # Page number and page size come from the query string; fall back
        # to sane defaults on bad input.
        try:
            page = int(self.request.get('p', 1))
            if page <= 0:
                raise ValueError('page must be positive')
        except ValueError:
            page = 1

        try:
            num = int(self.request.get('n', N))
            if num <= 0 or num > 200:
                raise ValueError('num out of range')
        except ValueError:
            num = N

        offset = (page-1) * num

        q = Commit.all()
        q.order('-__key__')
        results = q.fetch(num, offset)

        revs = [toRev(r) for r in results]
        builders = {}

        for r in revs:
            for b in r['builds']:
                builders[b['builder']] = builderInfo(b['builder'])

        # Fill in a placeholder result for any builder that has no result
        # for a given revision, so the table renders a full grid.
        for r in revs:
            have = set(x['builder'] for x in r['builds'])
            need = set(builders.keys()).difference(have)
            for n in need:
                r['builds'].append({'builder': n, 'log': '', 'ok': False})
            r['builds'].sort(cmp=byBuilder)

        builders = list(builders.items())
        builders.sort()
        values = {"revs": revs, "builders": [v for k, v in builders]}

        values['num'] = num
        values['prev'] = page - 1
        if len(results) == num:
            values['next'] = page + 1

        path = os.path.join(os.path.dirname(__file__), 'main.html')
        self.response.out.write(template.render(path, values))


# A DashboardHandler is a webapp.RequestHandler that additionally provides:
#   authenticated_post - called by post after authenticating
#   json - writes obj in JSON format to the response output
class DashboardHandler(webapp.RequestHandler):
    def post(self):
        if not auth(self.request):
            self.response.set_status(403)
            return
        self.authenticated_post()

    def authenticated_post(self):
        return

    def json(self, obj):
        self.response.set_status(200)
        simplejson.dump(obj, self.response.out)
        return
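
# A hypothetical subclass, to illustrate the hooks above:
#
#   class Ping(DashboardHandler):
#       def authenticated_post(self):
#           # only reached after auth(self.request) succeeds
#           self.json({'Status': 'OK'})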

# Todo serves /todo. It tells the builder which commits need to be built.
class Todo(DashboardHandler):
    def get(self):
        builder = self.request.get('builder')
        key = 'todo-%s' % builder
        response = memcache.get(key)
        if response is None:
            # Fell out of memcache. Rebuild from datastore results.
            # We walk the commit list looking for nodes that have not
            # been built by this builder.
            q = Commit.all()
            q.order('-__key__')
            todo = []
            for c in q.fetch(N+1):
                if not built(c, builder):
                    todo.append({'Hash': c.node})
            response = simplejson.dumps(todo)
            memcache.set(key, response, 3600)
        self.response.set_status(200)
        self.response.out.write(response)


def built(c, builder):
    # A build string starts with "<builder>`"; see the Commit.builds format.
    for b in c.builds:
        if b.startswith(builder+'`'):
            return True
    return False


# LogHandler serves /log/. It retrieves log data by content hash.
class LogHandler(DashboardHandler):
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        # The request path is /log/<sha256 hex digest>.
        hash = self.request.path[5:]
        l = CompressedLog.get_by_key_name(hash)
        if l is None:
            self.response.set_status(404)
            return
        log = bz2.decompress(l.log)
        self.response.set_status(200)
        self.response.out.write(log)


# Init creates the commit with id 0. Since this commit doesn't have a parent,
# it cannot be created by the /commit handler.
class Init(DashboardHandler):
    def authenticated_post(self):
        date = parseDate(self.request.get('date'))
        node = self.request.get('node')
        if not validNode(node) or date is None:
            logging.error("Not valid node ('%s') or bad date (%s %s)", node, date, self.request.get('date'))
            self.response.set_status(500)
            return

        commit = Commit(key_name='00000000-%s' % node)
        commit.num = 0
        commit.node = node
        commit.parentnode = ''
        commit.user = self.request.get('user')
        commit.date = date
        commit.desc = self.request.get('desc').encode('utf8')

        commit.put()

        self.response.set_status(200)


# The last commit when we switched to using entity groups.
# This is the root of the new commit entity group.
RootCommitKeyName = '00000f26-f32c6f1038207c55d5780231f7484f311020747e'


# CommitHandler serves /commit.
# A GET of /commit retrieves information about the specified commit.
# A POST of /commit creates a node for the given commit.
# If the commit already exists, the POST silently succeeds (like mkdir -p).
class CommitHandler(DashboardHandler):
    def get(self):
        node = self.request.get('node')
        if not validNode(node):
            return self.json({'Status': 'FAIL', 'Error': 'malformed node hash'})
        n = nodeByHash(node)
        if n is None:
            return self.json({'Status': 'FAIL', 'Error': 'unknown revision'})
        return self.json({'Status': 'OK', 'Node': nodeObj(n)})

    def authenticated_post(self):
        # Require auth with the master key, not a per-builder key.
        if self.request.get('builder'):
            self.response.set_status(403)
            return

        node = self.request.get('node')
        date = parseDate(self.request.get('date'))
        user = self.request.get('user')
        desc = self.request.get('desc').encode('utf8')
        parenthash = self.request.get('parent')

        if not validNode(node) or not validNode(parenthash) or date is None:
            return self.json({'Status': 'FAIL', 'Error': 'malformed node, parent, or date'})

        n = nodeByHash(node)
        if n is None:
            p = nodeByHash(parenthash)
            if p is None:
                return self.json({'Status': 'FAIL', 'Error': 'unknown parent'})

            # Want to create the new node in a transaction so that multiple
            # requests creating it do not collide and so that multiple requests
            # creating different nodes get different sequence numbers.
            # All queries within a transaction must include an ancestor,
            # but the original datastore objects we used for the dashboard
            # have no common ancestor. Instead, we use a well-known
            # root node - the last one before we switched to entity groups -
            # as the common ancestor.
            root = Commit.get_by_key_name(RootCommitKeyName)

            def add_commit():
                if nodeByHash(node, ancestor=root) is not None:
                    return

                # Determine the number for this commit.
                # Once we have created one new entry it will be lastRooted.num+1,
                # but the very first commit created in this scheme will have to
                # use last.num's number instead (last is likely not rooted).
                q = Commit.all()
                q.order('-__key__')
                q.ancestor(root)
                last = q.fetch(1)[0]
                num = last.num+1

                n = Commit(key_name='%08x-%s' % (num, node), parent=root)
                n.num = num
                n.node = node
                n.parentnode = parenthash
                n.user = user
                n.date = date
                n.desc = desc
                n.put()

            db.run_in_transaction(add_commit)
            n = nodeByHash(node)
            if n is None:
                return self.json({'Status': 'FAIL', 'Error': 'failed to create commit node'})

        return self.json({'Status': 'OK', 'Node': nodeObj(n)})


# Build serves /build.
# A POST to /build records a new build result.
class Build(webapp.RequestHandler):
    def post(self):
        if not auth(self.request):
            self.response.set_status(403)
            return

        builder = self.request.get('builder')
        log = self.request.get('log').encode('utf8')

        # Store a failure log keyed by its SHA256 digest; an empty log
        # means the build succeeded.
        loghash = ''
        if len(log) > 0:
            loghash = hashlib.sha256(log).hexdigest()
            l = CompressedLog(key_name=loghash)
            l.log = bz2.compress(log)
            l.put()

        node = self.request.get('node')
        if not validNode(node):
            logging.error('Invalid node %s' % (node))
            self.response.set_status(500)
            return

        n = nodeByHash(node)
        if n is None:
            logging.error('Cannot find node %s' % (node))
            self.response.set_status(404)
            return
        nn = n

        def add_build():
            # Reload the commit inside the transaction; queries in a
            # transaction must be ancestor queries.
            n = nodeByHash(node, ancestor=nn)
            if n is None:
                logging.error('Cannot find hash in add_build: %s %s' % (builder, node))
                return

            s = '%s`%s' % (builder, loghash)
            for i, b in enumerate(n.builds):
                if b.split('`', 1)[0] == builder:
                    # logging.error('Found result for %s %s already' % (builder, node))
                    n.builds[i] = s
                    break
            else:
                # logging.error('Added result for %s %s' % (builder, node))
                n.builds.append(s)
            n.put()

        db.run_in_transaction(add_build)

        # Invalidate the cached todo list for this builder.
        key = 'todo-%s' % builder
        memcache.delete(key)

        c = getBrokenCommit(node, builder)
        if c is not None and not c.fail_notification_sent:
            notifyBroken(c, builder, log)

        self.response.set_status(200)


def getBrokenCommit(node, builder):
    """
    getBrokenCommit returns the Commit that broke the build for this builder.
    The Commit will be either the one specified by node or the one after.
    """

    # Squelch mail if the build is already fixed at head.
    # (broken returns None for "no result yet", so compare against False.)
    head = firstResult(builder)
    if broken(head, builder) == False:
        return

    # Get the current node and the nodes before and after it.
    cur = nodeByHash(node)
    if cur is None:
        return
    before = nodeBefore(cur)
    after = nodeAfter(cur)

    if broken(before, builder) == False and broken(cur, builder):
        return cur
    if broken(cur, builder) == False and broken(after, builder):
        return after

    return
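
# The cases, illustrated (results for a single builder, oldest first):
#   before ok,     cur broken   -> cur introduced the breakage
#   cur ok,        after broken -> after introduced the breakage
#   before broken, cur broken   -> breakage started earlier; return None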


def firstResult(builder):
    # Return the most recent commit that has any result for this builder.
    q = Commit.all().order('-__key__')
    for c in q.fetch(20):
        for b in c.builds:
            p = b.split('`', 1)
            if p[0] == builder:
                return c
    return None


def nodeBefore(c):
    return nodeByHash(c.parentnode)


def nodeAfter(c):
    return Commit.all().filter('parentnode =', c.node).get()


def notifyBroken(c, builder, log):
    # Mark the commit in a transaction so that only the first failure
    # report sends mail.
    def send():
        n = Commit.get(c.key())
        if n is None:
            logging.error("couldn't retrieve Commit '%s'" % c.key())
            return False
        if n.fail_notification_sent:
            return False
        n.fail_notification_sent = True
        return n.put()
    if not db.run_in_transaction(send):
        return

    # Mail only the last 100 lines of the build log.
    log = '\n'.join(log.split('\n')[-100:])

    subject = const.mail_fail_subject % (builder, c.desc.split('\n')[0])
    path = os.path.join(os.path.dirname(__file__), 'fail-notify.txt')
    body = template.render(path, {
        "builder": builder,
        "node": c.node,
        "user": c.user,
        "desc": c.desc,
        "loghash": logHash(c, builder),
        "log": log,
    })
    mail.send_mail(
        sender=const.mail_from,
        to=const.mail_fail_to,
        subject=subject,
        body=body
    )


def logHash(c, builder):
    for b in c.builds:
        p = b.split('`', 1)
        if p[0] == builder:
            return p[1]
    return ""


def broken(c, builder):
    """
    broken returns True if commit c breaks the build for the specified builder,
    False if it is a good build, and None if no results exist for this builder.
    """
    if c is None:
        return None
    for b in c.builds:
        p = b.split('`', 1)
        if p[0] == builder:
            return len(p[1]) > 0
    return None


def node(num):
    q = Commit.all()
    q.filter('num =', num)
    n = q.get()
    return n


def nodeByHash(hash, ancestor=None):
    q = Commit.all()
    q.filter('node =', hash)
    if ancestor is not None:
        q.ancestor(ancestor)
    n = q.get()
    return n


# nodeObj returns a JSON object (ready to be passed to simplejson.dump)
# describing node.
def nodeObj(n):
    return {
        'Hash': n.node,
        'ParentHash': n.parentnode,
        'User': n.user,
        'Date': n.date.strftime('%Y-%m-%d %H:%M %z'),
        'Desc': n.desc,
    }


class FixedOffset(datetime.tzinfo):
    """Fixed offset in seconds east from UTC."""

    def __init__(self, offset):
        self.__offset = datetime.timedelta(seconds=offset)

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return None

    def dst(self, dt):
        return datetime.timedelta(0)


def validNode(node):
    # A valid node is a 40-character lowercase hex string (an hg hash).
    if len(node) != 40:
        return False
    for x in node:
        o = ord(x)
        if (o < ord('0') or o > ord('9')) and (o < ord('a') or o > ord('f')):
            return False
    return True
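
# For example:
#   validNode('f32c6f1038207c55d5780231f7484f311020747e')  # -> True
#   validNode('tip')                                       # -> False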


def parseDate(date):
    # Dates arrive as a unix timestamp, optionally followed by '-' or '+'
    # and a timezone offset in seconds.
    if '-' in date:
        (a, offset) = date.split('-', 1)
        try:
            return datetime.datetime.fromtimestamp(float(a), FixedOffset(0-int(offset)))
        except ValueError:
            return None
    if '+' in date:
        (a, offset) = date.split('+', 1)
        try:
            return datetime.datetime.fromtimestamp(float(a), FixedOffset(int(offset)))
        except ValueError:
            return None
    try:
        return datetime.datetime.utcfromtimestamp(float(date))
    except ValueError:
        return None
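
# For example (assumed inputs):
#   parseDate('1305213694.0-21600')  # datetime in a zone 21600s behind UTC
#   parseDate('1305213694.0')        # naive UTC datetime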


email_re = re.compile('^[^<]+<([^>]*)>$')


def toUsername(user):
    r = email_re.match(user)
    if r is None:
        return user
    email = r.groups()[0]
    return email.replace('@golang.org', '')
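
# For example (assumed input):
#   toUsername(u'Gopher <gopher@golang.org>')  # -> u'gopher'
#   toUsername(u'gopher')                      # -> u'gopher'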


def dateToShortStr(d):
    return d.strftime('%a %b %d %H:%M')


def parseBuild(build):
    # Inverse of the "<builder>`<loghash>" encoding stored in Commit.builds.
    [builder, logblob] = build.split('`')
    return {'builder': builder, 'log': logblob, 'ok': len(logblob) == 0}


def nodeInfo(c):
    return {
        "node": c.node,
        "user": toUsername(c.user),
        "date": dateToShortStr(c.date),
        "desc": c.desc,
        "shortdesc": c.desc.split('\n', 2)[0]
    }


def toRev(c):
    b = nodeInfo(c)
    b['builds'] = [parseBuild(build) for build in c.builds]
    return b


def byBuilder(x, y):
    return cmp(x['builder'], y['builder'])


# Give old builders work; otherwise they pound on the web site.
class Hwget(DashboardHandler):
    def get(self):
        self.response.out.write("8000\n")


# This is the URL map for the server. The first three entries are public; the
# rest are used only by the builders.
application = webapp.WSGIApplication(
    [('/', MainPage),
     ('/hw-get', Hwget),
     ('/log/.*', LogHandler),
     ('/commit', CommitHandler),
     ('/init', Init),
     ('/todo', Todo),
     ('/build', Build),
     ], debug=True)
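
# Typical builder traffic, for illustration (assumed builder name):
#   GET  /todo?builder=linux-amd64  -> JSON list of unbuilt commit hashes
#   POST /build                     -> records a result from the builder,
#                                      node, and log form values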


def main():
    run_wsgi_app(application)


if __name__ == "__main__":
    main()