changeset 293:d15dda1b1f76

merge
author Matt Johnston <matt@ucc.asn.au>
date Sat, 06 Jul 2019 18:29:45 +0800
parents 28eb733cb803 (current diff) f7261dd970da (diff)
children 6bacd8ca9f8f
files py/uploader.py
diffstat 30 files changed, 1989 insertions(+), 989 deletions(-)
--- a/py/config.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/config.py	Sat Jul 06 18:29:45 2019 +0800
@@ -13,15 +13,16 @@
 PARAMS_FILE = os.path.join(os.path.dirname(__file__), 'tempserver.conf')
 
 SENSOR_BASE_DIR = '/sys/devices/w1_bus_master1'
-FRIDGE_GPIO = '/sys/devices/virtual/gpio/gpio17'
+FRIDGE_GPIO_PIN = 17
 WORT_NAME = '28-0000042cf4dd'
 FRIDGE_NAME = '28-0000042cccc4'
 AMBIENT_NAME = '28-0000042c6dbb'
 INTERNAL_TEMPERATURE = '/sys/class/thermal/thermal_zone0/temp'
 
 HMAC_KEY = "a key"
-#UPDATE_URL = 'https://matt.ucc.asn.au/test/templog/update'
-UPDATE_URL = 'https://evil.ucc.asn.au/~matt/templog/update'
+SERVER_URL = 'https://evil.ucc.asn.au/~matt/templog'
+UPDATE_URL = "%s/update" % SERVER_URL
+SETTINGS_URL = "%s/get_settings" % SERVER_URL
 
 # site-local values overridden in localconfig, eg WORT_NAME, HMAC_KEY
 try:
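
A minimal sketch of the site-local override mentioned in the comment above, assuming a localconfig.py module alongside config.py (all values illustrative, not part of this changeset):

    # localconfig.py -- hypothetical site-local overrides
    WORT_NAME = '28-0000042cf4dd'
    FRIDGE_NAME = '28-0000042cccc4'
    HMAC_KEY = 'a per-site secret'
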
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/configwaiter.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,62 @@
+import asyncio
+import aiohttp
+
+import utils
+from utils import L,D,EX,W,E
+import config
+
+class ConfigWaiter(object):
+    """ Waits for config updates from the server. http long polling """
+
+    def __init__(self, server):
+        self.server = server
+        self.epoch_tag = None
+        self.http_session = aiohttp.ClientSession()
+
+    @asyncio.coroutine
+    def run(self):
+        # wait until something has been uploaded (the uploader itself waits 5 seconds)
+        yield from asyncio.sleep(10)
+        while True:
+            yield from self.do()
+
+            # avoid spinning too fast
+            yield from asyncio.sleep(1)
+
+    @asyncio.coroutine
+    def do(self):
+        try:
+            if self.epoch_tag:
+                headers = {'etag': self.epoch_tag}
+            else:
+                headers = None
+
+            r = yield from asyncio.wait_for(
+                self.http_session.get(config.SETTINGS_URL, headers=headers), 
+                300)
+            D("waiter status %d" % r.status)
+            if r.status == 200:
+                rawresp = yield from asyncio.wait_for(r.text(), 600)
+
+                resp = utils.json_load_round_float(rawresp)
+
+                self.epoch_tag = resp['epoch_tag']
+                D("waiter got epoch tag %s" % self.epoch_tag)
+                epoch = self.epoch_tag.split('-')[0]
+                if self.server.params.receive(resp['params'], epoch):
+                    self.server.reload_signal(True)
+            elif r.status == 304:
+                pass
+            else:
+                # longer timeout to avoid spinning
+                yield from asyncio.sleep(30)
+
+        except asyncio.TimeoutError:
+            D("configwaiter http timed out")
+            pass
+        except Exception as e:
+            EX("Error watching config: %s" % str(e))
+
+
+
+
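
For reference, a sketch of the long-poll contract ConfigWaiter relies on, with field names taken from the code above; the server side is not part of this changeset and the values here are illustrative:

    # The client sends its last epoch tag in an 'etag' header; the server answers
    # 304 if nothing has changed, or 200 with a JSON body shaped like this:
    example_response = {
        "epoch_tag": "6f7a...-3",             # "<epoch>-<suffix>"; the epoch part must match Params.get_epoch()
        "params": {"fridge_setpoint": 16.0},  # same keys and types as the local Params defaults
    }
    epoch = example_response["epoch_tag"].split("-")[0]
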
--- a/py/fridge.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/fridge.py	Sat Jul 06 18:29:45 2019 +0800
@@ -1,60 +1,46 @@
 # -*- coding: utf-8 -*-
+import asyncio
+
 from utils import L,W,E,EX,D
 import config
-import gevent
 
-class Fridge(gevent.Greenlet):
+import gpio
+
+class Fridge(object):
 
     OVERSHOOT_MAX_DIV = 1800.0 # 30 mins
 
     def __init__(self, server):
-        gevent.Greenlet.__init__(self)
         self.server = server
-        self.setup_gpio()
+        self.gpio = gpio.Gpio(config.FRIDGE_GPIO_PIN, "fridge")
         self.wort_valid_clock = 0
         self.fridge_on_clock = 0
         self.off()
 
-    def setup_gpio(self):
-        dir_fn = '%s/direction' % config.FRIDGE_GPIO
-        with open(dir_fn, 'w') as f:
-            f.write('low')
-        val_fn = '%s/value' % config.FRIDGE_GPIO
-        # XXX - Fridge should have __enter__/__exit__, close the file there.
-        self.value_file = open(val_fn, 'r+')
-
     def turn(self, value):
-        self.value_file.seek(0)
-        if value:
-            self.value_file.write('1')
-        else:
-            self.value_file.write('0')
-        self.value_file.flush()
+        self.gpio.turn(value)
 
     def on(self):
         self.turn(True)
+        pass
 
     def off(self):
         self.turn(False)
         self.fridge_off_clock = self.server.now()
 
     def is_on(self):
-        self.value_file.seek(0)
-        buf = self.value_file.read().strip()
-        if buf == '0':
-            return False
-        if buf != '1':
-            E("Bad value read from gpio '%s': '%s'" 
-                % (self.value_file.name, buf))
-        return True
+        return self.gpio.get_state()
 
-    # greenlet subclassed
-    def _run(self):
+    @asyncio.coroutine
+    def run(self):
         if self.server.params.disabled:
             L("Fridge is disabled")
         while True:
-            self.do()
-            self.server.sleep(config.FRIDGE_SLEEP)
+            try:
+                self.do()
+                yield from self.server.sleep(config.FRIDGE_SLEEP)
+            except Exception as e:
+                EX("fridge failed")
 
     def do(self):
         """ this is the main fridge control logic """
@@ -96,6 +82,8 @@
         if fridge is None:
             W("Invalid fridge sensor")
 
+        D("fridge on %s" % self.is_on())
+
         if self.is_on():
             turn_off = False
             on_time = self.server.now() - self.fridge_on_clock
@@ -124,6 +112,7 @@
         else:
             # fridge is off
             turn_on = False
+            D("fridge %(fridge)s max %(fridge_max)s wort %(wort)s wort_max %(wort_max)s" % locals())
             if not params.nowort \
                 and wort is not None \
                 and wort >= wort_max:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/gpio.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,4 @@
+try:
+	from gpio_rpi import *
+except ImportError:
+	from gpio_test import *
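
Callers only import the gpio facade; a minimal usage sketch (the pin number is illustrative):

    import gpio

    g = gpio.Gpio(17, "fridge")   # gpio_rpi on the Pi, gpio_test stand-in elsewhere
    g.turn(True)
    print(g.get_state())          # True
    g.turn(False)
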
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/gpio_rpi.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,49 @@
+import os
+
+import RPi.GPIO as GPIO
+
+from utils import L,D,EX,W,E
+
+__all__ = ["Gpio"]
+
+class Gpio(object):
+    SYS_GPIO_BASE = '/sys/class/gpio/gpio'
+    def __init__(self, pin, name):
+        self.pin = pin
+        self.name = name
+
+        dir_fn = '%s%d/direction' % (self.SYS_GPIO_BASE, pin)
+        with open(dir_fn, 'w') as f:
+            # make sure it doesn't start "on"
+            f.write('low')
+        val_fn = '%s%d/value' % (self.SYS_GPIO_BASE, pin)
+        self.value_file = open(val_fn, 'r+')
+
+    def turn(self, value):
+        self.value_file.seek(0)
+        self.value_file.write('1' if value else '0')
+        self.value_file.flush()
+
+    def get_state(self):
+        self.value_file.seek(0)
+        buf = self.value_file.read().strip()
+        if buf == '0':
+            return False
+        if buf != '1':
+            E("Bad value read from gpio '%s': '%s'" 
+                % (self.value_file.name, buf))
+        return True
+
+
+def main():
+    g = Gpio(17, 'f')
+    g.turn(1)
+
+    print(g.get_state())
+
+    g.turn(0)
+
+    print(g.get_state())
+
+if __name__ == '__main__':
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/gpio_test.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,23 @@
+import os
+
+from utils import L,D,EX,W
+
+__all__ = ["Gpio"]
+
+class Gpio(object):
+	def __init__(self, pin, name):
+		self.name = name
+		self.pin = pin
+		self.state = False
+		L("Test GPIO %s pin %d started, set off." % (name, pin))
+
+	def turn(self, value):
+		self.state = bool(value)
+		onoff = ("off", "on")[int(self.state)]
+		L("Test GPIO %s pin %s turned %s" % (self.name, self.pin, onoff))
+
+	def get_state(self):
+		return self.state
+		
+
+	
--- a/py/params.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/params.py	Sat Jul 06 18:29:45 2019 +0800
@@ -2,12 +2,13 @@
 import collections
 import json
 import signal
-import StringIO
-
-import gevent
+import tempfile
+import os
+import binascii
 
 import config
 from utils import W,L,E,EX
+import utils
 
 _FIELD_DEFAULTS = {
     'fridge_setpoint': 16,
@@ -26,6 +27,7 @@
 
     def __init__(self):
         self.update(_FIELD_DEFAULTS)
+        self._set_epoch(None)
 
     def __getattr__(self, k):
         return self[k]
@@ -35,16 +37,14 @@
         self[k]
         self[k] = v
 
-    def load(self, f = None):
-        if not f:
-            try:
-                f = file(config.PARAMS_FILE, 'r')
-            except IOError, e:
-                W("Missing parameter file, using defaults. %s", e)
-                return
+    def _set_epoch(self, epoch):
+        # since __setattr__ is overridden
+        object.__setattr__(self, '_epoch', epoch)
+
+    def _do_load(self, f):
         try:
-            u = json.load(f)
-        except Exception, e:
+            u = utils.json_load_round_float(f.read())
+        except Exception as e:
             raise self.Error(e)
 
         for k in u:
@@ -53,19 +53,77 @@
             if k not in self:
                 raise self.Error("Unknown parameter %s=%s in file '%s'" % (str(k), str(u[k]), getattr(f, 'name', '???')))
         self.update(u)
+        # new epoch, 120 random bits
+        self._set_epoch(binascii.hexlify(os.urandom(15)).decode())
 
         L("Loaded parameters")
         L(self.save_string())
 
+    def load(self, f = None):
+        if f:
+            return self._do_load(f)
+        else:
+            try:
+                with open(config.PARAMS_FILE, 'r') as f:
+                    return self._do_load(f)
+            except IOError as e:
+                W("Missing parameter file, using defaults. %s" % str(e))
+                return
 
-    def save(self, f = None):
-        if not f:
-            f = file(config.PARAMS_FILE, 'w')
-        json.dump(self, f, sort_keys=True, indent=4)
-        f.write('\n')
-        f.flush()
+    def get_epoch(self):
+        return self._epoch
+
+    def receive(self, params, epoch):
+        """ updates parameters from the server. does some validation,
+        writes config file to disk.
+        Returns True on success, False failure 
+        """
+
+        if epoch != self._epoch:
+            return
+
+        def same_type(a, b):
+            ta = type(a)
+            tb = type(b)
+
+            if ta == int:
+                ta = float
+            if tb == int:
+                tb = float
+
+            return ta == tb
+
+        if self.keys() != params.keys():
+            diff = self.keys() ^ params.keys()
+            E("Mismatching params, %s" % str(diff))
+            return False
+
+        for k, v in params.items():
+            if not same_type(v, self[k]):
+                E("Bad type for %s" % k)
+                return False
+
+        dir = os.path.dirname(config.PARAMS_FILE)
+        try:
+            t = tempfile.NamedTemporaryFile(prefix='config',
+                mode='w+t', # NamedTemporaryFile is binary by default
+                dir = dir,
+                delete = False)
+
+            out = json.dumps(params, sort_keys=True, indent=4)+'\n'
+            t.write(out)
+            name = t.name
+            t.close()
+
+            os.rename(name, config.PARAMS_FILE)
+        except Exception as e:
+            EX("Problem: %s" % e)
+            return False
+
+        self.update(params)
+        L("Received parameters")
+        L(self.save_string())
+        return True
 
     def save_string(self):
-        s = StringIO.StringIO()
-        self.save(s)
-        return s.getvalue()
+        return json.dumps(self, sort_keys=True, indent=4)
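
The temp-file-then-rename sequence in receive() is the usual way to replace a config file atomically; the same pattern in isolation (path and data illustrative):

    import json, os, tempfile

    def atomic_write_json(path, data):
        # write beside the target, then rename over it, so a crash or a
        # concurrent reader never sees a half-written file
        d = os.path.dirname(path) or '.'
        t = tempfile.NamedTemporaryFile(prefix='config', mode='w+t', dir=d, delete=False)
        t.write(json.dumps(data, sort_keys=True, indent=4) + '\n')
        t.close()
        os.rename(t.name, path)

    atomic_write_json('params.json.example', {'fridge_setpoint': 16})
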
--- a/py/receive.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/receive.py	Sat Jul 06 18:29:45 2019 +0800
@@ -28,8 +28,8 @@
 
     def_params = params.Params()
 
-    if def_params.viewkeys() != new_params.viewkeys():
-        diff = def_params.viewkeys() ^ new_params.viewkeys()
+    if def_params.keys() != new_params.keys():
+        diff = def_params.keys() ^ new_params.keys()
         return "Mismatching params, %s" % str(diff)
 
     for k, v in new_params.items():
@@ -48,7 +48,7 @@
         t.close()
 
         os.rename(name, config.PARAMS_FILE)
-    except Exception, e:
+    except Exception as e:
         return "Problem: %s" % e
 
     try:
@@ -56,11 +56,11 @@
         if pid < 2:
             return "Bad pid %d" % pid
         os.kill(pid, signal.SIGHUP)
-    except Exception, e:
+    except Exception as e:
         return "HUP problem: %s" % e
 
     return 'Good Update'
 
 if __name__ == '__main__':
-    print main()
+    print(main())
 
--- a/py/requirements.txt	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/requirements.txt	Sat Jul 06 18:29:45 2019 +0800
@@ -1,17 +1,9 @@
-argparse==1.2.1
-wsgiref==0.1.2
-
-# sha256: v6nYRtuRp9i2o26HNT7tZBx-Pn0L-guZdXltIn8ttOs
-gevent==1.0
-
-# sha256: sWDlVqIuFrrj8_Y__OeJhoLIA82JZFcZL3tU_nT-mR4
-greenlet==0.4.2
+# sha256: nkIlLxfR3YnuMXReDE--WIYsJRR-sO9SlcnNm8tOosE
+lockfile==0.10.2
 
-# sha256: I9pYnJH1nLfGRNXOXfU51Eg0G9R5kX1t3pc_guJxkUc
-lockfile==0.9.1
+# sha256: 2zFqD89UuXAsr2ymGbdr4l1T9e4Hgbr_C7ni4DVfryQ
+python-daemon==2.0.5
 
-# sha256: FmX7Fr_q5y8Wqi3kC8dWYUWL1Ccxp9RjqRGo1er5bAs
-python-daemon==1.6
+# sha256: 6vR5rMmP_uCgKYgkZevyHzwwLhuUpBsWyKWmlbxhSQA
+aiohttp==0.16.3
 
-# sha256: NkiAJJLpVf_rKPbauGStcUBZ9UOL9nmNgvnUd8ZmrKM
-requests==2.3.0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/sensor.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,10 @@
+import os
+
+def make_sensor(server):
+    if server.test_mode():
+        import sensor_test
+        return sensor_test.SensorTest(server)
+    else:
+        import sensor_ds18b20
+        return sensor_ds18b20.SensorDS18B20(server)
+
--- a/py/sensor_ds18b20.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/sensor_ds18b20.py	Sat Jul 06 18:29:45 2019 +0800
@@ -2,27 +2,26 @@
 
 import os
 import re
-
-import gevent
-import gevent.threadpool
+import asyncio
+import concurrent.futures
 
 import config
 from utils import D,L,W,E,EX
 
-class DS18B20s(gevent.Greenlet):
+class SensorDS18B20(object):
 
     THERM_RE = re.compile('.* YES\n.*t=(.*)\n', re.MULTILINE)
 
     def __init__(self, server):
-        gevent.Greenlet.__init__(self)
         self.server = server
-        self.readthread = gevent.threadpool.ThreadPool(1)
+        self.readthread = concurrent.futures.ThreadPoolExecutor(max_workers=1)
         self.master_dir = config.SENSOR_BASE_DIR
 
+    @asyncio.coroutine
     def do(self):
         vals = {}
         for n in self.sensor_names():
-                value = self.do_sensor(n)
+                value = yield from self.do_sensor(n)
                 if value is not None:
                     vals[n] = value
 
@@ -32,26 +31,31 @@
 
         self.server.add_reading(vals)
 
-    def _run(self):
+    @asyncio.coroutine
+    def run(self):
         while True:
-            self.do()
-            self.server.sleep(config.SENSOR_SLEEP)
+            yield from self.do()
+            yield from asyncio.sleep(config.SENSOR_SLEEP)
 
+
+    @asyncio.coroutine
     def read_wait(self, f):
-        # handles a blocking file read with a gevent threadpool. A
-        # real python thread performs the read while other gevent
-        # greenlets keep running.
+        # handles a blocking file read with a threadpool. A
+        # real python thread performs the read while other 
+        # asyncio tasks keep running.
         # the ds18b20 takes ~750ms to read, which is noticeable
         # interactively.
-        return self.readthread.apply(f.read)
+        loop = asyncio.get_event_loop()
+        return (yield from loop.run_in_executor(self.readthread, f.read))
 
+    @asyncio.coroutine
     def do_sensor(self, s, contents = None):
         """ contents can be set by the caller for testing """
         try:
             if contents is None:
                 fn = os.path.join(self.master_dir, s, 'w1_slave')
-                f = open(fn, 'r')
-                contents = self.read_wait(f)
+                with open(fn, 'r') as f:
+                    contents = yield from self.read_wait(f)
 
             match = self.THERM_RE.match(contents)
             if match is None:
@@ -62,14 +66,15 @@
                 E("Problem reading sensor '%s': %f" % (s, temp))
                 return None
             return temp
-        except Exception, e:
+        except Exception as e:
             EX("Problem reading sensor '%s': %s" % (s, str(e)))
             return None
 
     def do_internal(self):
         try:
-            return int(open(config.INTERNAL_TEMPERATURE, 'r').read()) / 1000.0
-        except Exception, e:
+            with open(config.INTERNAL_TEMPERATURE, 'r') as f:
+                return int(f.read()) / 1000.0
+        except Exception as e:
             EX("Problem reading internal sensor: %s" % str(e))
             return None
         
@@ -77,7 +82,8 @@
     def sensor_names(self):
         """ Returns a sequence of sensorname """
         slaves_path = os.path.join(self.master_dir, "w1_master_slaves")
-        contents = open(slaves_path, 'r').read()
+        with open(slaves_path, 'r') as f:
+            contents = f.read()
         if 'not found' in contents:
             E("No W1 sensors found")
             return []
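
A self-contained sketch of the executor pattern read_wait uses, written in the same pre-3.5 coroutine style as the rest of this changeset (the path is illustrative):

    import asyncio

    @asyncio.coroutine
    def read_blocking(path):
        # push the slow, blocking read onto a worker thread so other coroutines
        # keep running while the ~750ms sensor read completes
        loop = asyncio.get_event_loop()
        with open(path) as f:
            return (yield from loop.run_in_executor(None, f.read))

    # asyncio.get_event_loop().run_until_complete(read_blocking('/proc/uptime'))
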
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/py/sensor_test.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,34 @@
+import asyncio
+
+from utils import L
+
+class SensorTest(object):
+
+    def __init__(self, server):
+        self.server = server
+        
+    def kill(self):
+        L("Killed SensorTest")
+
+    def make_vals(self):
+        def try_read(f, fallback):
+            try:
+                return float(open(f, 'r').read())
+            except Exception as e:
+                return fallback
+
+        vals = {}
+        vals[self.wort_name()] = try_read('test_wort.txt', 18)
+        vals[self.fridge_name()] = try_read('test_fridge.txt', 20)
+        vals['ambient'] = 31.2
+        return vals
+
+    @asyncio.coroutine
+    def run(self):
+        while True:
+            yield from asyncio.sleep(1)
+            vals = self.make_vals()
+            self.server.add_reading(vals)
+
+    def wort_name(self):
+        return '28-wortname'
+
+    def fridge_name(self):
+        return '28-fridgename'
--- a/py/setup_gpio.sh	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/setup_gpio.sh	Sat Jul 06 18:29:45 2019 +0800
@@ -2,15 +2,8 @@
 
 # this must run as root
 
-PINS="17 7 24 25"
-GROUP=fridgeio
+PINS="17"
 
 for PIN in $PINS; do
     echo $PIN > /sys/class/gpio/export
-
-    for f in direction value; do
-        fn=/sys/devices/virtual/gpio/gpio$PIN/$f
-        chgrp $GROUP $fn
-        chmod g+rw $fn
-    done
 done
--- a/py/tempserver.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/tempserver.py	Sat Jul 06 18:29:45 2019 +0800
@@ -5,9 +5,9 @@
 import logging
 import time
 import signal
+import asyncio
+import argparse
 
-import gevent
-import gevent.monkey
 import lockfile.pidlockfile
 import daemon
 
@@ -15,28 +15,28 @@
 from utils import L,D,EX,W
 import fridge
 import config
-import sensor_ds18b20
+import sensor
 import params
 import uploader
+import configwaiter
 
 
 class Tempserver(object):
-    def __init__(self):
+    def __init__(self, test_mode):
         self.readings = []
         self.current = (None, None)
         self.fridge = None
-        self._wakeup = gevent.event.Event()
-
-        # don't patch os, fork() is used by daemonize
-        gevent.monkey.patch_all(os=False, thread=False)
+        self._wakeup = asyncio.Event()
+        self._test_mode = test_mode
 
     def __enter__(self):
         self.params = params.Params()
         self.fridge = fridge.Fridge(self)
         self.uploader = uploader.Uploader(self)
+        self.configwaiter = configwaiter.ConfigWaiter(self)
         self.params.load()
-        self.set_sensors(sensor_ds18b20.DS18B20s(self))
-        gevent.signal(signal.SIGHUP, self._reload_signal)
+        self.set_sensors(sensor.make_sensor(self))
+        asyncio.get_event_loop().add_signal_handler(signal.SIGHUP, self.reload_signal)
         return self
 
     def __exit__(self, exc_type, exc_value, traceback):
@@ -50,16 +50,21 @@
 
         # XXX do these go here or in __enter__() ?
         self.start_time = self.now()
-        self.fridge.start()
-        self.sensors.start()
-        self.uploader.start()
+        runloops = [
+            self.fridge.run(),
+            self.sensors.run(),
+            self.uploader.run(),
+            self.configwaiter.run(),
+        ]
 
-        # won't return.
-        while True:
-            try:
-                gevent.sleep(60)
-            except KeyboardInterrupt:
-                break
+        loop = asyncio.get_event_loop()
+        try:
+            loop.run_until_complete(asyncio.gather(*runloops))
+        except KeyboardInterrupt:
+            print('\nctrl-c')
+        finally:
+            # loop.close() seems necessary otherwise get warnings about signal handlers
+            loop.close()
 
     def now(self):
         return utils.monotonic_time()
@@ -93,63 +98,88 @@
 
     def current_temps(self):
         """ returns (wort_temp, fridge_temp) tuple """
+        D("current: %s" % str(self.current))
         return self.current
 
+    @asyncio.coroutine
     def sleep(self, timeout):
         """ sleeps for timeout seconds, though wakes if the server's config is updated """
-        self._wakeup.wait(timeout)
-        
-    def _reload_signal(self):
+        # XXX fixme - we should wake on _wakeup but asyncio Condition with wait_for is a bit broken? 
+        # https://groups.google.com/forum/#!topic/python-tulip/eSm7rZAe9LM
+        # For now we just sleep, ignore the _wakeup
         try:
-            self.params.load()
-            L("Reloaded.")
+            yield from asyncio.wait_for(self._wakeup.wait(), timeout=timeout)
+        except asyncio.TimeoutError:
+            pass
+
+    def reload_signal(self, no_file = False):
+        try:
+            if not no_file:
+                self.params.load()
+                L("Reloaded.")
             self._wakeup.set()
             self._wakeup.clear()
-        except self.Error, e:
+        except self.params.Error as e:
             W("Problem reloading: %s" % str(e))
 
-def setup_logging():
+    def test_mode(self):
+        return self._test_mode
+
+def setup_logging(debug = False):
+    level = logging.INFO
+    if debug:
+        level = logging.DEBUG
     logging.basicConfig(format='%(asctime)s %(message)s', 
             datefmt='%m/%d/%Y %I:%M:%S %p',
-            level=logging.INFO)
+            level=level)
+    #logging.getLogger("asyncio").setLevel(logging.DEBUG)
 
-def start():
-    with Tempserver() as server:
+def start(test_mode):
+    with Tempserver(test_mode) as server:
         server.run()
 
 def main():
-    setup_logging()
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--hup', action='store_true')
+    parser.add_argument('--new', action='store_true')
+    parser.add_argument('-D', '--daemon', action='store_true')
+    parser.add_argument('-d', '--debug', action='store_true')
+    parser.add_argument('-t', '--test', action='store_true')
+    args = parser.parse_args()
+
+    setup_logging(args.debug)
 
     heredir = os.path.abspath(os.path.dirname(__file__))
     pidpath = os.path.join(heredir, 'tempserver.pid')
     pidf = lockfile.pidlockfile.PIDLockFile(pidpath, threaded=False)
-    do_hup = '--hup' in sys.argv
+
+
     try:
         pidf.acquire(1)
         pidf.release()
-    except (lockfile.AlreadyLocked, lockfile.LockTimeout), e:
+    except (lockfile.AlreadyLocked, lockfile.LockTimeout) as e:
         pid = pidf.read_pid()
-        if do_hup:
+        if args.hup:
             try:
                 os.kill(pid, signal.SIGHUP)
-                print>>sys.stderr, "Sent SIGHUP to process %d" % pid
+                print("Sent SIGHUP to process %d" % pid, file=sys.stderr)
                 sys.exit(0)
             except OSError:
-                print>>sys.stderr, "Process %d isn't running?" % pid
+                print("Process %d isn't running?" % pid, file=sys.stderr)
                 sys.exit(1)
 
-        print>>sys.stderr, "Locked by PID %d" % pid
+        print("Locked by PID %d" % pid, file=sys.stderr)
     
         stale = False
         if pid > 0:
-            if '--new' in sys.argv:
+            if args.new:
                 try:
                     os.kill(pid, 0)
                 except OSError:
                     stale = True
 
                 if not stale:
-                    print>>sys.stderr, "Stopping old tempserver pid %d" % pid
+                    print("Stopping old tempserver pid %d" % pid, file=sys.stderr)
                     os.kill(pid, signal.SIGTERM)
                     time.sleep(2)
                     pidf.acquire(0)
@@ -164,21 +194,21 @@
 
         if stale:
             # isn't still running, steal the lock
-            print>>sys.stderr, "Unlinking stale lockfile %s for pid %d" % (pidpath, pid)
+            print("Unlinking stale lockfile %s for pid %d" % (pidpath, pid), file=sys.stderr)
             pidf.break_lock()
 
-    if do_hup:
-        print>>sys.stderr, "Doesn't seem to be running"
+    if args.hup:
+        print("Doesn't seem to be running", file=sys.stderr)
         sys.exit(1)
 
-    if '--daemon' in sys.argv:
+    if args.daemon:
         logpath = os.path.join(os.path.dirname(__file__), 'tempserver.log')
         logf = open(logpath, 'a+')
         with daemon.DaemonContext(pidfile=pidf, stdout=logf, stderr = logf):
-            start()
+            start(args.test)
     else:
         with pidf:
-            start()
+            start(args.test)
 
 if __name__ == '__main__':
     main()
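
run() above gathers all the long-running coroutines into a single run_until_complete() call; a reduced sketch of the same structure, with made-up workers:

    import asyncio

    @asyncio.coroutine
    def ticker(name, period):
        while True:
            print("tick", name)
            yield from asyncio.sleep(period)

    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(asyncio.gather(ticker("sensors", 1), ticker("uploader", 5)))
    except KeyboardInterrupt:
        pass
    finally:
        loop.close()
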
--- a/py/test.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/test.py	Sat Jul 06 18:29:45 2019 +0800
@@ -1,5 +1,5 @@
 #!/usr/bin/env python2.7
-import StringIO
+import io
 
 import unittest
 import sensor_ds18b20
@@ -50,20 +50,20 @@
             self.params.somewrongthing = 5
 
     def test_params_load(self):
-        jsbuf = StringIO.StringIO('{"fridge_setpoint": 999}')
+        jsbuf = io.StringIO('{"fridge_setpoint": 999}')
 
         self.params.load(f=jsbuf)
         self.assertEqual(self.params.fridge_setpoint, 999)
 
         with self.assertRaises(params.Params.Error):
-            jsbuf = StringIO.StringIO('{"something_else": 999}')
+            jsbuf = io.StringIO('{"something_else": 999}')
             self.params.load(f=jsbuf)
 
         with self.assertRaises(KeyError):
             x = self.params.something_else
 
     def test_params_save(self):
-        jsbuf = StringIO.StringIO()
+        jsbuf = io.StringIO()
 
         self.params.overshoot_delay = 123
         self.params.save(f=jsbuf)
--- a/py/uploader.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/uploader.py	Sat Jul 06 18:29:45 2019 +0800
@@ -1,29 +1,28 @@
 import json
 import hmac
+import hashlib
 import zlib
 import binascii
 import logging
+import asyncio
 
-import gevent
-import requests
+import aiohttp
 
 import config
 from utils import L,D,EX,W,E
 import utils
 
-class Uploader(gevent.Greenlet):
+class Uploader(object):
     def __init__(self, server):
-        gevent.Greenlet.__init__(self)
         self.server = server
 
-        requests_log = logging.getLogger("requests")
-        requests_log.setLevel(logging.WARNING)
-
-    def _run(self):
-        gevent.sleep(5)
+    @asyncio.coroutine
+    def run(self):
+        # wait for the first read
+        yield from asyncio.sleep(5)
         while True:
-            self.do()
-            self.server.sleep(config.UPLOAD_SLEEP)
+            yield from self.do()
+            yield from asyncio.sleep(config.UPLOAD_SLEEP)
 
     def get_tosend(self, readings):
         tosend = {}
@@ -37,33 +36,38 @@
         tosend['fridge_name'] = self.server.wort_name
 
         tosend['current_params'] = dict(self.server.params)
+        tosend['current_params_epoch'] = self.server.params.get_epoch()
 
         tosend['start_time'] = self.server.start_time
         tosend['uptime'] = utils.uptime()
 
         return tosend
 
+    @asyncio.coroutine
     def send(self, tosend):
         js = json.dumps(tosend)
-        js_enc = binascii.b2a_base64(zlib.compress(js))
-        mac = hmac.new(config.HMAC_KEY, js_enc).hexdigest()
-        send_data = {'data': js_enc, 'hmac': mac}
-        r = requests.post(config.UPDATE_URL, data=send_data, timeout=60)
-        result = r.text
-        if result != 'OK':
+        if self.server.test_mode():
+            D("Would upload %s to %s" % (js, config.UPDATE_URL))
+            return
+        js_enc = binascii.b2a_base64(zlib.compress(js.encode())).strip()
+        mac = hmac.new(config.HMAC_KEY.encode(), js_enc, hashlib.sha256).hexdigest()
+        send_data = {'data': js_enc.decode(), 'hmac': mac}
+        r = yield from asyncio.wait_for(aiohttp.request('post', config.UPDATE_URL, data=send_data), 60)
+        result = yield from asyncio.wait_for(r.text(), 60)
+        if r.status != 200 or result != 'OK':
             raise Exception("Server returned %s" % result)
 
+    @asyncio.coroutine
     def do(self):
-        readings = self.server.take_readings()
+        readings = None
         try:
+            readings = self.server.take_readings()
             tosend = self.get_tosend(readings)
+            D("tosend >>>%s<<<" % str(tosend))
             nreadings = len(readings)
-            self.send(tosend)
+            yield from self.send(tosend)
             readings = None
             D("Sent updated %d readings" % nreadings)
-        except requests.exceptions.RequestException, e:
-            E("Error in uploader: %s" % str(e))
-        except Exception, e:
+        except Exception as e:
             EX("Error in uploader: %s" % str(e))
         finally:
             if readings is not None:
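
The upload payload is base64(zlib(json)) with a hex SHA-256 HMAC computed over the base64 text; a sketch of how a receiving end could verify and unpack it (not the actual server code):

    import binascii, hashlib, hmac, json, zlib

    def unpack_update(form, key):
        # form is the POSTed {'data': ..., 'hmac': ...} pair built in Uploader.send()
        js_enc = form['data'].encode()
        expected = hmac.new(key.encode(), js_enc, hashlib.sha256).hexdigest()
        if not hmac.compare_digest(expected, form['hmac']):
            raise ValueError("bad hmac")
        return json.loads(zlib.decompress(binascii.a2b_base64(js_enc)).decode())
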
--- a/py/utils.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/py/utils.py	Sat Jul 06 18:29:45 2019 +0800
@@ -4,6 +4,8 @@
 import time
 import select
 import logging
+import binascii
+import json
 
 D = logging.debug
 L = logging.info
@@ -60,7 +62,7 @@
                 time.sleep(try_time)
             return None
 
-        new_f.func_name = func.func_name
+        new_f.__name__ = func.__name__
         return new_f
     return inner
 
@@ -113,7 +115,7 @@
         pid = os.fork()
         if pid > 0:
             sys.exit(0)
-    except OSError, e:
+    except OSError as e:
         E("Bad fork()")
         sys.exit(1)
 
@@ -123,13 +125,16 @@
         pid = os.fork()
         if pid > 0:
             sys.exit(0)
-    except OSError, e:
+    except OSError as e:
         E("Bad fork()")
         sys.exit(1)
 
 def uptime():
     try:
         return float(open('/proc/uptime', 'r').read().split(' ', 1)[0])
-    except Exception, e:
+    except Exception as e:
         return -1
 
+
+def json_load_round_float(s, **args):
+    return json.loads(s, parse_float=lambda f: round(float(f), 2), **args)
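
Usage sketch: every float in the parsed document is rounded to two decimal places on the way in.

    >>> json_load_round_float('{"wort": 18.23456, "fridge": 4.0}')
    {'wort': 18.23, 'fridge': 4.0}
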
--- a/web/bottle.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/bottle.py	Sat Jul 06 18:29:45 2019 +0800
@@ -9,14 +9,14 @@
 
 Homepage and documentation: http://bottlepy.org/
 
-Copyright (c) 2011, Marcel Hellkamp.
+Copyright (c) 2013, Marcel Hellkamp.
 License: MIT (see LICENSE for details)
 """
 
 from __future__ import with_statement
 
 __author__ = 'Marcel Hellkamp'
-__version__ = '0.11.dev'
+__version__ = '0.12.8'
 __license__ = 'MIT'
 
 # The gevent server adapter needs to patch some modules before they are imported
@@ -36,15 +36,18 @@
         import gevent.monkey; gevent.monkey.patch_all()
 
 import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
-        os, re, subprocess, sys, tempfile, threading, time, urllib, warnings
+        os, re, subprocess, sys, tempfile, threading, time, warnings
 
 from datetime import date as datedate, datetime, timedelta
 from tempfile import TemporaryFile
 from traceback import format_exc, print_exc
-
-try: from json import dumps as json_dumps, loads as json_lds
+from inspect import getargspec
+from unicodedata import normalize
+
+
+try: from simplejson import dumps as json_dumps, loads as json_lds
 except ImportError: # pragma: no cover
-    try: from simplejson import dumps as json_dumps, loads as json_lds
+    try: from json import dumps as json_dumps, loads as json_lds
     except ImportError:
         try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
         except ImportError:
@@ -58,8 +61,9 @@
 # It ain't pretty but it works... Sorry for the mess.
 
 py   = sys.version_info
-py3k = py >= (3,0,0)
-py25 = py <  (2,6,0)
+py3k = py >= (3, 0, 0)
+py25 = py <  (2, 6, 0)
+py31 = (3, 1, 0) <= py < (3, 2, 0)
 
 # Workaround for the missing "as" keyword in py3k.
 def _e(): return sys.exc_info()[1]
@@ -76,17 +80,20 @@
 if py3k:
     import http.client as httplib
     import _thread as thread
-    from urllib.parse import urljoin, parse_qsl, SplitResult as UrlSplitResult
+    from urllib.parse import urljoin, SplitResult as UrlSplitResult
     from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
+    urlunquote = functools.partial(urlunquote, encoding='latin1')
     from http.cookies import SimpleCookie
     from collections import MutableMapping as DictMixin
     import pickle
     from io import BytesIO
+    from configparser import ConfigParser
     basestring = str
     unicode = str
     json_loads = lambda s: json_lds(touni(s))
     callable = lambda x: hasattr(x, '__call__')
     imap = map
+    def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
 else: # 2.x
     import httplib
     import thread
@@ -96,17 +103,18 @@
     from itertools import imap
     import cPickle as pickle
     from StringIO import StringIO as BytesIO
+    from ConfigParser import SafeConfigParser as ConfigParser
     if py25:
-        msg = "Python 2.5 support may be dropped in future versions of Bottle."
+        msg  = "Python 2.5 support may be dropped in future versions of Bottle."
         warnings.warn(msg, DeprecationWarning)
-        from cgi import parse_qsl
         from UserDict import DictMixin
         def next(it): return it.next()
         bytes = str
     else: # 2.6, 2.7
-        from urlparse import parse_qsl
         from collections import MutableMapping as DictMixin
+    unicode = unicode
     json_loads = json_lds
+    eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
 
 # Some helpers for string/byte handling
 def tob(s, enc='utf8'):
@@ -117,12 +125,12 @@
 
 # 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
 # 3.1 needs a workaround.
-NCTextIOWrapper = None
-if (3,0,0) < py < (3,2,0):
+if py31:
     from io import TextIOWrapper
     class NCTextIOWrapper(TextIOWrapper):
         def close(self): pass # Keep wrapped buffer open.
 
+
 # A bug in functools causes it to break if the wrapper is an instance method
 def update_wrapper(wrapper, wrapped, *a, **ka):
     try: functools.update_wrapper(wrapper, wrapped, *a, **ka)
@@ -133,7 +141,7 @@
 # These helpers are used at module level and need to be defined first.
 # And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
 
-def depr(message):
+def depr(message, hard=False):
     warnings.warn(message, DeprecationWarning, stacklevel=3)
 
 def makelist(data): # This is just to handy
@@ -173,6 +181,7 @@
         property. '''
 
     def __init__(self, func):
+        self.__doc__ = getattr(func, '__doc__')
         self.func = func
 
     def __get__(self, obj, cls):
@@ -207,34 +216,6 @@
     pass
 
 
-#TODO: This should subclass BaseRequest
-class HTTPResponse(BottleException):
-    """ Used to break execution and immediately finish the response """
-    def __init__(self, output='', status=200, header=None):
-        super(BottleException, self).__init__("HTTP Response %d" % status)
-        self.status = int(status)
-        self.output = output
-        self.headers = HeaderDict(header) if header else None
-
-    def apply(self, response):
-        if self.headers:
-            for key, value in self.headers.allitems():
-                response.headers[key] = value
-        response.status = self.status
-
-
-class HTTPError(HTTPResponse):
-    """ Used to generate an error page """
-    def __init__(self, code=500, output='Unknown Error', exception=None,
-                 traceback=None, header=None):
-        super(HTTPError, self).__init__(output, code, header)
-        self.exception = exception
-        self.traceback = traceback
-
-    def __repr__(self):
-        return tonat(template(ERROR_PAGE_TEMPLATE, e=self))
-
-
 
 
 
@@ -256,11 +237,19 @@
 
 
 class RouteSyntaxError(RouteError):
-    """ The route parser found something not supported by this router """
+    """ The route parser found something not supported by this router. """
 
 
 class RouteBuildError(RouteError):
-    """ The route could not been built """
+    """ The route could not be built. """
+
+
+def _re_flatten(p):
+    ''' Turn all capturing groups in a regular expression pattern into
+        non-capturing groups. '''
+    if '(' not in p: return p
+    return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
+        lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
 
 
 class Router(object):
@@ -276,34 +265,27 @@
     '''
 
     default_pattern = '[^/]+'
-    default_filter   = 're'
-    #: Sorry for the mess. It works. Trust me.
-    rule_syntax = re.compile('(\\\\*)'\
-        '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
-          '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
-            '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')
+    default_filter  = 're'
+
+    #: The current CPython regexp implementation does not allow more
+    #: than 99 matching groups per regular expression.
+    _MAX_GROUPS_PER_PATTERN = 99
 
     def __init__(self, strict=False):
-        self.rules    = {} # A {rule: Rule} mapping
-        self.builder  = {} # A rule/name->build_info mapping
-        self.static   = {} # Cache for static routes: {path: {method: target}}
-        self.dynamic  = [] # Cache for dynamic routes. See _compile()
+        self.rules    = [] # All rules in order
+        self._groups  = {} # index of regexes to find them in dyna_routes
+        self.builder  = {} # Data structure for the url builder
+        self.static   = {} # Search structure for static routes
+        self.dyna_routes   = {}
+        self.dyna_regexes  = {} # Search structure for dynamic routes
         #: If true, static routes are no longer checked first.
         self.strict_order = strict
-        self.filters = {'re': self.re_filter, 'int': self.int_filter,
-                        'float': self.float_filter, 'path': self.path_filter}
-
-    def re_filter(self, conf):
-        return conf or self.default_pattern, None, None
-
-    def int_filter(self, conf):
-        return r'-?\d+', int, lambda x: str(int(x))
-
-    def float_filter(self, conf):
-        return r'-?[\d.]+', float, lambda x: str(float(x))
-
-    def path_filter(self, conf):
-        return r'.+?', None, None
+        self.filters = {
+            're':    lambda conf:
+                (_re_flatten(conf or self.default_pattern), None, None),
+            'int':   lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
+            'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
+            'path':  lambda conf: (r'.+?', None, None)}
 
     def add_filter(self, name, func):
         ''' Add a filter. The provided function is called with the configuration
@@ -311,9 +293,12 @@
         The first element is a string, the last two are callables or None. '''
         self.filters[name] = func
 
-    def parse_rule(self, rule):
-        ''' Parses a rule into a (name, filter, conf) token stream. If mode is
-            None, name contains a static rule part. '''
+    rule_syntax = re.compile('(\\\\*)'\
+        '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
+          '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
+            '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')
+
+    def _itertokens(self, rule):
         offset, prefix = 0, ''
         for match in self.rule_syntax.finditer(rule):
             prefix += rule[offset:match.start()]
@@ -322,77 +307,95 @@
                 prefix += match.group(0)[len(g[0]):]
                 offset = match.end()
                 continue
-            if prefix: yield prefix, None, None
-            name, filtr, conf = g[1:4] if not g[2] is None else g[4:7]
-            if not filtr: filtr = self.default_filter
-            yield name, filtr, conf or None
+            if prefix:
+                yield prefix, None, None
+            name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
+            yield name, filtr or 'default', conf or None
             offset, prefix = match.end(), ''
         if offset <= len(rule) or prefix:
             yield prefix+rule[offset:], None, None
 
     def add(self, rule, method, target, name=None):
-        ''' Add a new route or replace the target for an existing route. '''
-        if rule in self.rules:
-            self.rules[rule][method] = target
-            if name: self.builder[name] = self.builder[rule]
-            return
-
-        target = self.rules[rule] = {method: target}
-
-        # Build pattern and other structures for dynamic routes
-        anons = 0      # Number of anonymous wildcards
-        pattern = ''   # Regular expression  pattern
-        filters = []   # Lists of wildcard input filters
-        builder = []   # Data structure for the URL builder
+        ''' Add a new rule or replace the target for an existing rule. '''
+        anons     = 0    # Number of anonymous wildcards found
+        keys      = []   # Names of keys
+        pattern   = ''   # Regular expression pattern with named groups
+        filters   = []   # Lists of wildcard input filters
+        builder   = []   # Data structure for the URL builder
         is_static = True
-        for key, mode, conf in self.parse_rule(rule):
+
+        for key, mode, conf in self._itertokens(rule):
             if mode:
                 is_static = False
+                if mode == 'default': mode = self.default_filter
                 mask, in_filter, out_filter = self.filters[mode](conf)
-                if key:
+                if not key:
+                    pattern += '(?:%s)' % mask
+                    key = 'anon%d' % anons
+                    anons += 1
+                else:
                     pattern += '(?P<%s>%s)' % (key, mask)
-                else:
-                    pattern += '(?:%s)' % mask
-                    key = 'anon%d' % anons; anons += 1
+                    keys.append(key)
                 if in_filter: filters.append((key, in_filter))
                 builder.append((key, out_filter or str))
             elif key:
                 pattern += re.escape(key)
                 builder.append((None, key))
+
         self.builder[rule] = builder
         if name: self.builder[name] = builder
 
         if is_static and not self.strict_order:
-            self.static[self.build(rule)] = target
+            self.static.setdefault(method, {})
+            self.static[method][self.build(rule)] = (target, None)
             return
 
-        def fpat_sub(m):
-            return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:'
-        flat_pattern = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, pattern)
-
         try:
-            re_match = re.compile('^(%s)$' % pattern).match
+            re_pattern = re.compile('^(%s)$' % pattern)
+            re_match = re_pattern.match
         except re.error:
             raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
 
-        def match(path):
-            """ Return an url-argument dictionary. """
-            url_args = re_match(path).groupdict()
-            for name, wildcard_filter in filters:
-                try:
-                    url_args[name] = wildcard_filter(url_args[name])
-                except ValueError:
-                    raise HTTPError(400, 'Path has wrong format.')
-            return url_args
-
-        try:
-            combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, flat_pattern)
-            self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1])
-            self.dynamic[-1][1].append((match, target))
-        except (AssertionError, IndexError): # AssertionError: Too many groups
-            self.dynamic.append((re.compile('(^%s$)' % flat_pattern),
-                                [(match, target)]))
-        return match
+        if filters:
+            def getargs(path):
+                url_args = re_match(path).groupdict()
+                for name, wildcard_filter in filters:
+                    try:
+                        url_args[name] = wildcard_filter(url_args[name])
+                    except ValueError:
+                        raise HTTPError(400, 'Path has wrong format.')
+                return url_args
+        elif re_pattern.groupindex:
+            def getargs(path):
+                return re_match(path).groupdict()
+        else:
+            getargs = None
+
+        flatpat = _re_flatten(pattern)
+        whole_rule = (rule, flatpat, target, getargs)
+
+        if (flatpat, method) in self._groups:
+            if DEBUG:
+                msg = 'Route <%s %s> overwrites a previously defined route'
+                warnings.warn(msg % (method, rule), RuntimeWarning)
+            self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
+        else:
+            self.dyna_routes.setdefault(method, []).append(whole_rule)
+            self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
+
+        self._compile(method)
+
+    def _compile(self, method):
+        all_rules = self.dyna_routes[method]
+        comborules = self.dyna_regexes[method] = []
+        maxgroups = self._MAX_GROUPS_PER_PATTERN
+        for x in range(0, len(all_rules), maxgroups):
+            some = all_rules[x:x+maxgroups]
+            combined = (flatpat for (_, flatpat, _, _) in some)
+            combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
+            combined = re.compile(combined).match
+            rules = [(target, getargs) for (_, _, target, getargs) in some]
+            comborules.append((combined, rules))
 
     def build(self, _name, *anons, **query):
         ''' Build an URL by filling the wildcards in a rule. '''
@@ -407,31 +410,46 @@
 
     def match(self, environ):
         ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). '''
-        path, targets, urlargs = environ['PATH_INFO'] or '/', None, {}
-        if path in self.static:
-            targets = self.static[path]
+        verb = environ['REQUEST_METHOD'].upper()
+        path = environ['PATH_INFO'] or '/'
+        target = None
+        if verb == 'HEAD':
+            methods = ['PROXY', verb, 'GET', 'ANY']
         else:
-            for combined, rules in self.dynamic:
-                match = combined.match(path)
-                if not match: continue
-                getargs, targets = rules[match.lastindex - 1]
-                urlargs = getargs(path) if getargs else {}
-                break
-
-        if not targets:
-            raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO']))
-        method = environ['REQUEST_METHOD'].upper()
-        if method in targets:
-            return targets[method], urlargs
-        if method == 'HEAD' and 'GET' in targets:
-            return targets['GET'], urlargs
-        if 'ANY' in targets:
-            return targets['ANY'], urlargs
-        allowed = [verb for verb in targets if verb != 'ANY']
-        if 'GET' in allowed and 'HEAD' not in allowed:
-            allowed.append('HEAD')
-        raise HTTPError(405, "Method not allowed.",
-                        header=[('Allow',",".join(allowed))])
+            methods = ['PROXY', verb, 'ANY']
+
+        for method in methods:
+            if method in self.static and path in self.static[method]:
+                target, getargs = self.static[method][path]
+                return target, getargs(path) if getargs else {}
+            elif method in self.dyna_regexes:
+                for combined, rules in self.dyna_regexes[method]:
+                    match = combined(path)
+                    if match:
+                        target, getargs = rules[match.lastindex - 1]
+                        return target, getargs(path) if getargs else {}
+
+        # No matching route found. Collect alternative methods for 405 response
+        allowed = set([])
+        nocheck = set(methods)
+        for method in set(self.static) - nocheck:
+            if path in self.static[method]:
+                allowed.add(verb)
+        for method in set(self.dyna_regexes) - allowed - nocheck:
+            for combined, rules in self.dyna_regexes[method]:
+                match = combined(path)
+                if match:
+                    allowed.add(method)
+        if allowed:
+            allow_header = ",".join(sorted(allowed))
+            raise HTTPError(405, "Method not allowed.", Allow=allow_header)
+
+        # No matching route and no alternative method found. We give up
+        raise HTTPError(404, "Not found: " + repr(path))
+
+
+
+
 
 
 class Route(object):
@@ -459,12 +477,12 @@
         #: Additional keyword arguments passed to the :meth:`Bottle.route`
         #: decorator are stored in this dictionary. Used for route-specific
         #: plugin configuration and meta-data.
-        self.config = ConfigDict(config)
+        self.config = ConfigDict().load_dict(config, make_namespaces=True)
 
     def __call__(self, *a, **ka):
         depr("Some APIs changed to return Route() instances instead of"\
              " callables. Make sure to use the Route.call method and not to"\
-             " call Route instances directly.")
+             " call Route instances directly.") #0.12
         return self.call(*a, **ka)
 
     @cached_property
@@ -484,7 +502,7 @@
 
     @property
     def _context(self):
-        depr('Switch to Plugin API v2 and access the Route object directly.')
+        depr('Switch to Plugin API v2 and access the Route object directly.')  #0.12
         return dict(rule=self.rule, method=self.method, callback=self.callback,
                     name=self.name, app=self.app, config=self.config,
                     apply=self.plugins, skip=self.skiplist)
@@ -516,8 +534,32 @@
                 update_wrapper(callback, self.callback)
         return callback
 
+    def get_undecorated_callback(self):
+        ''' Return the callback. If the callback is a decorated function, try to
+            recover the original function. '''
+        func = self.callback
+        func = getattr(func, '__func__' if py3k else 'im_func', func)
+        closure_attr = '__closure__' if py3k else 'func_closure'
+        while hasattr(func, closure_attr) and getattr(func, closure_attr):
+            func = getattr(func, closure_attr)[0].cell_contents
+        return func
+
+    def get_callback_args(self):
+        ''' Return a list of argument names the callback (most likely) accepts
+            as keyword arguments. If the callback is a decorated function, try
+            to recover the original function before inspection. '''
+        return getargspec(self.get_undecorated_callback())[0]
+
+    def get_config(self, key, default=None):
+        ''' Lookup a config field and return its value, first checking the
+            route.config, then route.app.config.'''
+        for conf in (self.config, self.app.config):
+            if key in conf: return conf[key]
+        return default
+
     def __repr__(self):
-        return '<%s %r %r>' % (self.method, self.rule, self.callback)
+        cb = self.get_undecorated_callback()
+        return '<%s %r %r>' % (self.method, self.rule, cb)
 
 
 
@@ -539,28 +581,71 @@
     """
 
     def __init__(self, catchall=True, autojson=True):
-        #: If true, most exceptions are caught and returned as :exc:`HTTPError`
-        self.catchall = catchall
-
-        #: A :cls:`ResourceManager` for application files
+
+        #: A :class:`ConfigDict` for app specific configuration.
+        self.config = ConfigDict()
+        self.config._on_change = functools.partial(self.trigger_hook, 'config')
+        self.config.meta_set('autojson', 'validate', bool)
+        self.config.meta_set('catchall', 'validate', bool)
+        self.config['catchall'] = catchall
+        self.config['autojson'] = autojson
+
+        #: A :class:`ResourceManager` for application files
         self.resources = ResourceManager()
 
-        #: A :cls:`ConfigDict` for app specific configuration.
-        self.config = ConfigDict()
-        self.config.autojson = autojson
-
         self.routes = [] # List of installed :class:`Route` instances.
         self.router = Router() # Maps requests to :class:`Route` instances.
         self.error_handler = {}
 
         # Core plugins
         self.plugins = [] # List of installed plugins.
-        self.hooks = HooksPlugin()
-        self.install(self.hooks)
-        if self.config.autojson:
+        if self.config['autojson']:
             self.install(JSONPlugin())
         self.install(TemplatePlugin())
 
+    #: If true, most exceptions are caught and returned as :exc:`HTTPError`
+    catchall = DictProperty('config', 'catchall')
+
+    __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
+    __hook_reversed = 'after_request'
+
+    @cached_property
+    def _hooks(self):
+        return dict((name, []) for name in self.__hook_names)
+
+    def add_hook(self, name, func):
+        ''' Attach a callback to a hook. Three hooks are currently implemented:
+
+            before_request
+                Executed once before each request. The request context is
+                available, but no routing has happened yet.
+            after_request
+                Executed once after each request regardless of its outcome.
+            app_reset
+                Called whenever :meth:`Bottle.reset` is called.
+        '''
+        if name in self.__hook_reversed:
+            self._hooks[name].insert(0, func)
+        else:
+            self._hooks[name].append(func)
+
+    def remove_hook(self, name, func):
+        ''' Remove a callback from a hook. '''
+        if name in self._hooks and func in self._hooks[name]:
+            self._hooks[name].remove(func)
+            return True
+
+    def trigger_hook(self, __name, *args, **kwargs):
+        ''' Trigger a hook and return a list of results. '''
+        return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
+
+    def hook(self, name):
+        """ Return a decorator that attaches a callback to a hook. See
+            :meth:`add_hook` for details."""
+        def decorator(func):
+            self.add_hook(name, func)
+            return func
+        return decorator
 
     def mount(self, prefix, app, **options):
         ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
@@ -575,8 +660,7 @@
             All other parameters are passed to the underlying :meth:`route` call.
         '''
         if isinstance(app, basestring):
-            prefix, app = app, prefix
-            depr('Parameter order of Bottle.mount() changed.') # 0.10
+            depr('Parameter order of Bottle.mount() changed.', True) # 0.10
 
         segments = [p for p in prefix.split('/') if p]
         if not segments: raise ValueError('Empty path prefix.')
@@ -585,19 +669,25 @@
         def mountpoint_wrapper():
             try:
                 request.path_shift(path_depth)
-                rs = BaseResponse([], 200)
-                def start_response(status, header):
+                rs = HTTPResponse([])
+                def start_response(status, headerlist, exc_info=None):
+                    if exc_info:
+                        try:
+                            _raise(*exc_info)
+                        finally:
+                            exc_info = None
                     rs.status = status
-                    for name, value in header: rs.add_header(name, value)
+                    for name, value in headerlist: rs.add_header(name, value)
                     return rs.body.append
                 body = app(request.environ, start_response)
-                body = itertools.chain(rs.body, body)
-                return HTTPResponse(body, rs.status_code, rs.headers)
+                if body and rs.body: body = itertools.chain(rs.body, body)
+                rs.body = body or rs.body
+                return rs
             finally:
                 request.path_shift(-path_depth)
 
         options.setdefault('skip', True)
-        options.setdefault('method', 'ANY')
+        options.setdefault('method', 'PROXY')
         options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
         options['callback'] = mountpoint_wrapper
 
@@ -642,10 +732,6 @@
         if removed: self.reset()
         return removed
 
-    def run(self, **kwargs):
-        ''' Calls :func:`run` with the same parameters. '''
-        run(self, **kwargs)
-
     def reset(self, route=None):
         ''' Reset all routes (force plugins to be re-applied) and clear all
             caches. If an ID or route object is given, only that specific route
@@ -656,7 +742,7 @@
         for route in routes: route.reset()
         if DEBUG:
             for route in routes: route.prepare()
-        self.hooks.trigger('app_reset')
+        self.trigger_hook('app_reset')
 
     def close(self):
         ''' Close the application and all installed plugins. '''
@@ -664,6 +750,10 @@
             if hasattr(plugin, 'close'): plugin.close()
         self.stopped = True
 
+    def run(self, **kwargs):
+        ''' Calls :func:`run` with the same parameters. '''
+        run(self, **kwargs)
+
     def match(self, environ):
         """ Search for a matching route and return a (:class:`Route` , urlargs)
             tuple. The second value is a dictionary with parameters extracted
@@ -748,39 +838,31 @@
             return handler
         return wrapper
 
-    def hook(self, name):
-        """ Return a decorator that attaches a callback to a hook. Three hooks
-            are currently implemented:
-
-            - before_request: Executed once before each request
-            - after_request: Executed once after each request
-            - app_reset: Called whenever :meth:`reset` is called.
-        """
-        def wrapper(func):
-            self.hooks.add(name, func)
-            return func
-        return wrapper
-
-    def handle(self, path, method='GET'):
-        """ (deprecated) Execute the first matching route callback and return
-            the result. :exc:`HTTPResponse` exceptions are caught and returned.
-            If :attr:`Bottle.catchall` is true, other exceptions are caught as
-            well and returned as :exc:`HTTPError` instances (500).
-        """
-        depr("This method will change semantics in 0.10. Try to avoid it.")
-        if isinstance(path, dict):
-            return self._handle(path)
-        return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()})
+    def default_error_handler(self, res):
+        return tob(template(ERROR_PAGE_TEMPLATE, e=res))
 
     def _handle(self, environ):
+        path = environ['bottle.raw_path'] = environ['PATH_INFO']
+        if py3k:
+            try:
+                environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
+            except UnicodeError:
+                return HTTPError(400, 'Invalid path string. Expected UTF-8')
+
         try:
             environ['bottle.app'] = self
             request.bind(environ)
             response.bind()
-            route, args = self.router.match(environ)
-            environ['route.handle'] = environ['bottle.route'] = route
-            environ['route.url_args'] = args
-            return route.call(**args)
+            try:
+                self.trigger_hook('before_request')
+                route, args = self.router.match(environ)
+                environ['route.handle'] = route
+                environ['bottle.route'] = route
+                environ['route.url_args'] = args
+                return route.call(**args)
+            finally:
+                self.trigger_hook('after_request')
+
         except HTTPResponse:
             return _e()
         except RouteReset:
@@ -803,7 +885,8 @@
 
         # Empty output is done here
         if not out:
-            response['Content-Length'] = 0
+            if 'Content-Length' not in response:
+                response['Content-Length'] = 0
             return []
         # Join lists of byte or unicode strings. Mixed lists are NOT supported
         if isinstance(out, (tuple, list))\
@@ -814,19 +897,18 @@
             out = out.encode(response.charset)
         # Byte Strings are just returned
         if isinstance(out, bytes):
-            response['Content-Length'] = len(out)
+            if 'Content-Length' not in response:
+                response['Content-Length'] = len(out)
             return [out]
         # HTTPError or HTTPException (recursive, because they may wrap anything)
         # TODO: Handle these explicitly in handle() or make them iterable.
         if isinstance(out, HTTPError):
             out.apply(response)
-            out = self.error_handler.get(out.status, repr)(out)
-            if isinstance(out, HTTPResponse):
-                depr('Error handlers must not return :exc:`HTTPResponse`.') #0.9
+            out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
             return self._cast(out)
         if isinstance(out, HTTPResponse):
             out.apply(response)
-            return self._cast(out.output)
+            return self._cast(out.body)
 
         # File-like objects.
         if hasattr(out, 'read'):
@@ -837,10 +919,10 @@
 
         # Handle Iterables. We peek into them to detect their inner type.
         try:
-            out = iter(out)
-            first = next(out)
+            iout = iter(out)
+            first = next(iout)
             while not first:
-                first = next(out)
+                first = next(iout)
         except StopIteration:
             return self._cast('')
         except HTTPResponse:
@@ -854,13 +936,17 @@
         # These are the inner types allowed in iterator or generator objects.
         if isinstance(first, HTTPResponse):
             return self._cast(first)
-        if isinstance(first, bytes):
-            return itertools.chain([first], out)
-        if isinstance(first, unicode):
-            return imap(lambda x: x.encode(response.charset),
-                                  itertools.chain([first], out))
-        return self._cast(HTTPError(500, 'Unsupported response type: %s'\
-                                         % type(first)))
+        elif isinstance(first, bytes):
+            new_iter = itertools.chain([first], iout)
+        elif isinstance(first, unicode):
+            encoder = lambda x: x.encode(response.charset)
+            new_iter = imap(encoder, itertools.chain([first], iout))
+        else:
+            msg = 'Unsupported response type: %s' % type(first)
+            return self._cast(HTTPError(500, msg))
+        if hasattr(out, 'close'):
+            new_iter = _closeiter(new_iter, out.close)
+        return new_iter
 
     def wsgi(self, environ, start_response):
         """ The bottle WSGI-interface. """
@@ -868,12 +954,10 @@
             out = self._cast(self._handle(environ))
             # rfc2616 section 4.3
             if response._status_code in (100, 101, 204, 304)\
-            or request.method == 'HEAD':
+            or environ['REQUEST_METHOD'] == 'HEAD':
                 if hasattr(out, 'close'): out.close()
                 out = []
-            if isinstance(response._status_line, unicode):
-              response._status_line = str(response._status_line)
-            start_response(response._status_line, list(response.iter_headers()))
+            start_response(response._status_line, response.headerlist)
             return out
         except (KeyboardInterrupt, SystemExit, MemoryError):
             raise
@@ -887,7 +971,7 @@
                        % (html_escape(repr(_e())), html_escape(format_exc()))
             environ['wsgi.errors'].write(err)
             headers = [('Content-Type', 'text/html; charset=UTF-8')]
-            start_response('500 INTERNAL SERVER ERROR', headers)
+            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
             return [tob(err)]
 
     def __call__(self, environ, start_response):
@@ -903,7 +987,6 @@
 # HTTP and WSGI Tools ##########################################################
 ###############################################################################
 
-
 class BaseRequest(object):
     """ A wrapper for WSGI environment dictionaries that adds a lot of
         convenient access methods and properties. Most of them are read-only.
@@ -917,8 +1000,6 @@
 
     #: Maximum size of memory buffer for :attr:`body` in bytes.
     MEMFILE_MAX = 102400
-    #: Maximum number pr GET or POST parameters per request
-    MAX_PARAMS  = 100
 
     def __init__(self, environ=None):
         """ Wrap a WSGI environ dictionary. """
@@ -932,6 +1013,16 @@
         ''' Bottle application handling this request. '''
         raise RuntimeError('This request is not connected to an application.')
 
+    @DictProperty('environ', 'bottle.route', read_only=True)
+    def route(self):
+        """ The bottle :class:`Route` object that matches this request. """
+        raise RuntimeError('This request is not connected to a route.')
+
+    @DictProperty('environ', 'route.url_args', read_only=True)
+    def url_args(self):
+        """ The arguments extracted from the URL. """
+        raise RuntimeError('This request is not connected to a route.')
+
     @property
     def path(self):
         ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
@@ -957,8 +1048,7 @@
     def cookies(self):
         """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
             decoded. Use :meth:`get_cookie` if you expect signed cookies. """
-        cookies = SimpleCookie(self.environ.get('HTTP_COOKIE',''))
-        cookies = list(cookies.values())[:self.MAX_PARAMS]
+        cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
         return FormsDict((c.key, c.value) for c in cookies)
 
     def get_cookie(self, key, default=None, secret=None):
@@ -978,21 +1068,21 @@
             values are sometimes called "URL arguments" or "GET parameters", but
             not to be confused with "URL wildcards" as they are provided by the
             :class:`Router`. '''
-        pairs = parse_qsl(self.query_string, keep_blank_values=True)
         get = self.environ['bottle.get'] = FormsDict()
-        for key, value in pairs[:self.MAX_PARAMS]:
+        pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
+        for key, value in pairs:
             get[key] = value
         return get
 
     @DictProperty('environ', 'bottle.request.forms', read_only=True)
     def forms(self):
         """ Form values parsed from an `url-encoded` or `multipart/form-data`
-            encoded POST or PUT request body. The result is retuned as a
+            encoded POST or PUT request body. The result is returned as a
             :class:`FormsDict`. All keys and values are strings. File uploads
             are stored separately in :attr:`files`. """
         forms = FormsDict()
         for name, item in self.POST.allitems():
-            if not hasattr(item, 'filename'):
+            if not isinstance(item, FileUpload):
                 forms[name] = item
         return forms
 
@@ -1009,24 +1099,13 @@
 
     @DictProperty('environ', 'bottle.request.files', read_only=True)
     def files(self):
-        """ File uploads parsed from an `url-encoded` or `multipart/form-data`
-            encoded POST or PUT request body. The values are instances of
-            :class:`cgi.FieldStorage`. The most important attributes are:
-
-            filename
-                The filename, if specified; otherwise None; this is the client
-                side filename, *not* the file name on which it is stored (that's
-                a temporary file you don't deal with)
-            file
-                The file(-like) object from which you can read the data.
-            value
-                The value as a *string*; for file uploads, this transparently
-                reads the file every time you request the value. Do not do this
-                on big files.
+        """ File uploads parsed from `multipart/form-data` encoded POST or PUT
+            request body. The values are instances of :class:`FileUpload`.
+
         """
         files = FormsDict()
         for name, item in self.POST.allitems():
-            if hasattr(item, 'filename'):
+            if isinstance(item, FileUpload):
                 files[name] = item
         return files
 
@@ -1036,25 +1115,78 @@
             property holds the parsed content of the request body. Only requests
             smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
             exhaustion. '''
-        if 'application/json' in self.environ.get('CONTENT_TYPE', '') \
-        and 0 < self.content_length < self.MEMFILE_MAX:
-            return json_loads(self.body.read(self.MEMFILE_MAX))
+        ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
+        if ctype == 'application/json':
+            b = self._get_body_string()
+            if not b:
+                return None
+            return json_loads(b)
         return None
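
For illustration (not from the patch), how the stricter json property behaves inside a handler; the route path is made up::

    from bottle import Bottle, request

    app = Bottle()

    @app.post('/items')
    def create_item():
        data = request.json   # None unless the Content-Type is application/json
        if data is None:
            return {'error': 'expected a JSON body'}
        return {'received': data}
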
 
+    def _iter_body(self, read, bufsize):
+        maxread = max(0, self.content_length)
+        while maxread:
+            part = read(min(maxread, bufsize))
+            if not part: break
+            yield part
+            maxread -= len(part)
+
+    def _iter_chunked(self, read, bufsize):
+        err = HTTPError(400, 'Error while parsing chunked transfer body.')
+        rn, sem, bs = tob('\r\n'), tob(';'), tob('')
+        while True:
+            header = read(1)
+            while header[-2:] != rn:
+                c = read(1)
+                header += c
+                if not c: raise err
+                if len(header) > bufsize: raise err
+            size, _, _ = header.partition(sem)
+            try:
+                maxread = int(tonat(size.strip()), 16)
+            except ValueError:
+                raise err
+            if maxread == 0: break
+            buff = bs
+            while maxread > 0:
+                if not buff:
+                    buff = read(min(maxread, bufsize))
+                part, buff = buff[:maxread], buff[maxread:]
+                if not part: raise err
+                yield part
+                maxread -= len(part)
+            if read(2) != rn:
+                raise err
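
For illustration only (not from the patch), a standalone sketch of the chunked wire format the new _iter_chunked() parser consumes: each chunk is "<hex size>\r\n<data>\r\n" and a zero-size chunk ends the body. The decode_chunked helper below is hypothetical and simply mirrors the loop above::

    from io import BytesIO

    def decode_chunked(read, bufsize=64):
        # mirrors the parsing loop of BaseRequest._iter_chunked above
        while True:
            header = read(1)
            while header[-2:] != b'\r\n':
                c = read(1)
                if not c:
                    raise ValueError('truncated chunk header')
                header += c
            size = int(header.split(b';', 1)[0].strip(), 16)
            if size == 0:           # final zero-size chunk terminates the body
                break
            data = b''
            while len(data) < size:
                part = read(min(size - len(data), bufsize))
                if not part:
                    raise ValueError('truncated chunk data')
                data += part
            yield data
            if read(2) != b'\r\n':  # every chunk is followed by CRLF
                raise ValueError('missing chunk terminator')

    body = BytesIO(b"4\r\nWiki\r\n7\r\npedia i\r\n0\r\n\r\n")
    print(list(decode_chunked(body.read)))   # [b'Wiki', b'pedia i']
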
+
     @DictProperty('environ', 'bottle.request.body', read_only=True)
     def _body(self):
-        maxread = max(0, self.content_length)
-        stream = self.environ['wsgi.input']
-        body = BytesIO() if maxread < self.MEMFILE_MAX else TemporaryFile(mode='w+b')
-        while maxread > 0:
-            part = stream.read(min(maxread, self.MEMFILE_MAX))
-            if not part: break
+        body_iter = self._iter_chunked if self.chunked else self._iter_body
+        read_func = self.environ['wsgi.input'].read
+        body, body_size, is_temp_file = BytesIO(), 0, False
+        for part in body_iter(read_func, self.MEMFILE_MAX):
             body.write(part)
-            maxread -= len(part)
+            body_size += len(part)
+            if not is_temp_file and body_size > self.MEMFILE_MAX:
+                body, tmp = TemporaryFile(mode='w+b'), body
+                body.write(tmp.getvalue())
+                del tmp
+                is_temp_file = True
         self.environ['wsgi.input'] = body
         body.seek(0)
         return body
 
+    def _get_body_string(self):
+        ''' Read body until content-length or MEMFILE_MAX into a string. Raise
+            HTTPError(413) on requests that are too large. '''
+        clen = self.content_length
+        if clen > self.MEMFILE_MAX:
+            raise HTTPError(413, 'Request too large')
+        if clen < 0: clen = self.MEMFILE_MAX + 1
+        data = self.body.read(clen)
+        if len(data) > self.MEMFILE_MAX: # Fail fast
+            raise HTTPError(413, 'Request too large')
+        return data
+
     @property
     def body(self):
         """ The HTTP request body as a seek-able file-like object. Depending on
@@ -1065,6 +1197,11 @@
         self._body.seek(0)
         return self._body
 
+    @property
+    def chunked(self):
+        ''' True if Chunked transfer encoding was used. '''
+        return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
+
     #: An alias for :attr:`query`.
     GET = query
 
@@ -1075,25 +1212,35 @@
             instances of :class:`cgi.FieldStorage` (file uploads).
         """
         post = FormsDict()
+        # We default to application/x-www-form-urlencoded for everything that
+        # is not multipart and take the fast path (also: 3.1 workaround)
+        if not self.content_type.startswith('multipart/'):
+            pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
+            for key, value in pairs:
+                post[key] = value
+            return post
+
         safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
         for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
             if key in self.environ: safe_env[key] = self.environ[key]
-        if NCTextIOWrapper:
-            fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
-        else:
-            fb = self.body
-        data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
-        for item in (data.list or [])[:self.MAX_PARAMS]:
-            post[item.name] = item if item.filename else item.value
+        args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
+        if py31:
+            args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
+                                         newline='\n')
+        elif py3k:
+            args['encoding'] = 'utf8'
+        data = cgi.FieldStorage(**args)
+        self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958
+        data = data.list or []
+        for item in data:
+            if item.filename:
+                post[item.name] = FileUpload(item.file, item.name,
+                                             item.filename, item.headers)
+            else:
+                post[item.name] = item.value
         return post
 
     @property
-    def COOKIES(self):
-        ''' Alias for :attr:`cookies` (deprecated). '''
-        depr('BaseRequest.COOKIES was renamed to BaseRequest.cookies (lowercase).')
-        return self.cookies
-
-    @property
     def url(self):
         """ The full request URI including hostname and scheme. If your app
             lives behind a reverse proxy or load balancer and you get confusing
@@ -1108,7 +1255,7 @@
             but the fragment is always empty because it is not visible to the
             server. '''
         env = self.environ
-        http = env.get('wsgi.url_scheme', 'http')
+        http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
         host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
         if not host:
             # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
@@ -1157,6 +1304,11 @@
         return int(self.environ.get('CONTENT_LENGTH') or -1)
 
     @property
+    def content_type(self):
+        ''' The Content-Type header as a lowercase-string (default: empty). '''
+        return self.environ.get('CONTENT_TYPE', '').lower()
+
+    @property
     def is_xhr(self):
         ''' True if the request was triggered by a XMLHttpRequest. This only
             works with JavaScript libraries that support the `X-Requested-With`
@@ -1239,7 +1391,7 @@
             var = self.environ['bottle.request.ext.%s'%name]
             return var.__get__(self) if hasattr(var, '__get__') else var
         except KeyError:
-            raise AttributeError('Attribute %r not defined.' % name)       
+            raise AttributeError('Attribute %r not defined.' % name)
 
     def __setattr__(self, name, value):
         if name == 'environ': return object.__setattr__(self, name, value)
@@ -1276,6 +1428,14 @@
         This class does support dict-like case-insensitive item-access to
         headers, but is NOT a dict. Most notably, iterating over a response
         yields parts of the body and not the headers.
+
+        :param body: The response body as one of the supported types.
+        :param status: Either an HTTP status code (e.g. 200) or a status line
+                       including the reason phrase (e.g. '200 OK').
+        :param headers: A dictionary or a list of name-value pairs.
+
+        Additional keyword arguments are added to the list of headers.
+        Underscores in the header name are replaced with dashes.
     """
 
     default_status = 200
@@ -1289,22 +1449,30 @@
                   'Content-Length', 'Content-Range', 'Content-Type',
                   'Content-Md5', 'Last-Modified'))}
 
-    def __init__(self, body='', status=None, **headers):
-        self._status_line = None
-        self._status_code = None
+    def __init__(self, body='', status=None, headers=None, **more_headers):
         self._cookies = None
-        self._headers = {'Content-Type': [self.default_content_type]}
+        self._headers = {}
         self.body = body
         self.status = status or self.default_status
         if headers:
-            for name, value in headers.items():
-                self[name] = value
-
-    def copy(self):
+            if isinstance(headers, dict):
+                headers = headers.items()
+            for name, value in headers:
+                self.add_header(name, value)
+        if more_headers:
+            for name, value in more_headers.items():
+                self.add_header(name, value)
+
+    def copy(self, cls=None):
         ''' Returns a copy of self. '''
-        copy = Response()
+        cls = cls or BaseResponse
+        assert issubclass(cls, BaseResponse)
+        copy = cls()
         copy.status = self.status
         copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
+        if self._cookies:
+            copy._cookies = SimpleCookie()
+            copy._cookies.load(self._cookies.output(header=''))
         return copy
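
For illustration (not from the patch), the reworked constructor accepts a headers dict or list plus extra keyword headers; the values below are made up::

    from bottle import HTTPResponse

    res = HTTPResponse('created', 201,
                       headers=[('Cache-Control', 'no-cache')],
                       content_type='text/plain')
    # keyword header names are normalized (underscores presumably become
    # dashes via the module's header-key helper), so the response carries
    # Cache-Control and Content-Type headers alongside the 201 status.
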
 
     def __iter__(self):
@@ -1334,7 +1502,7 @@
             raise ValueError('String status line without a reason phrase.')
         if not 100 <= code <= 999: raise ValueError('Status code out of range.')
         self._status_code = code
-        self._status_line = status or ('%d Unknown' % code)
+        self._status_line = str(status or ('%d Unknown' % code))
 
     def _get_status(self):
         return self._status_line
@@ -1351,7 +1519,7 @@
     def headers(self):
         ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
             view on the response headers. '''
-        self.__dict__['headers'] = hdict = HeaderDict()
+        hdict = HeaderDict()
         hdict.dict = self._headers
         return hdict
 
@@ -1365,13 +1533,10 @@
             header with that name, return a default value. '''
         return self._headers.get(_hkey(name), [default])[-1]
 
-    def set_header(self, name, value, append=False):
+    def set_header(self, name, value):
         ''' Create a new response header, replacing any previously defined
             headers with the same name. '''
-        if append:
-            self.add_header(name, value)
-        else:
-            self._headers[_hkey(name)] = [str(value)]
+        self._headers[_hkey(name)] = [str(value)]
 
     def add_header(self, name, value):
         ''' Add an additional response header, not removing duplicates. '''
@@ -1380,44 +1545,36 @@
     def iter_headers(self):
         ''' Yield (header, value) tuples, skipping headers that are not
             allowed with the current response status code. '''
-        headers = self._headers.items()
-        bad_headers = self.bad_headers.get(self._status_code)
-        if bad_headers:
-            headers = [h for h in headers if h[0] not in bad_headers]
-        for name, values in headers:
-            for value in values:
-                yield name, value
-        if self._cookies:
-            for c in self._cookies.values():
-                yield 'Set-Cookie', c.OutputString()
-
-    def wsgiheader(self):
-        depr('The wsgiheader method is deprecated. See headerlist.') #0.10
         return self.headerlist
 
     @property
     def headerlist(self):
         ''' WSGI conform list of (header, value) tuples. '''
-        return list(self.iter_headers())
+        out = []
+        headers = list(self._headers.items())
+        if 'Content-Type' not in self._headers:
+            headers.append(('Content-Type', [self.default_content_type]))
+        if self._status_code in self.bad_headers:
+            bad_headers = self.bad_headers[self._status_code]
+            headers = [h for h in headers if h[0] not in bad_headers]
+        out += [(name, val) for name, vals in headers for val in vals]
+        if self._cookies:
+            for c in self._cookies.values():
+                out.append(('Set-Cookie', c.OutputString()))
+        return out
 
     content_type = HeaderProperty('Content-Type')
     content_length = HeaderProperty('Content-Length', reader=int)
+    expires = HeaderProperty('Expires',
+        reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
+        writer=lambda x: http_date(x))
 
     @property
-    def charset(self):
+    def charset(self, default='UTF-8'):
         """ Return the charset specified in the content-type header (default: utf8). """
         if 'charset=' in self.content_type:
             return self.content_type.split('charset=')[-1].split(';')[0].strip()
-        return 'UTF-8'
-
-    @property
-    def COOKIES(self):
-        """ A dict-like SimpleCookie instance. This should not be used directly.
-            See :meth:`set_cookie`. """
-        depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10
-        if not self._cookies:
-            self._cookies = SimpleCookie()
-        return self._cookies
+        return default
 
     def set_cookie(self, name, value, secret=None, **options):
         ''' Create a new cookie or replace an old one. If the `secret` parameter is
@@ -1488,48 +1645,66 @@
             out += '%s: %s\n' % (name.title(), value.strip())
         return out
 
-#: Thread-local storage for :class:`LocalRequest` and :class:`LocalResponse`
-#: attributes.
-_lctx = threading.local()
-
-def local_property(name):
+
+def local_property(name=None):
+    if name: depr('local_property() is deprecated and will be removed.') #0.12
+    ls = threading.local()
     def fget(self):
-        try:
-            return getattr(_lctx, name)
+        try: return ls.var
         except AttributeError:
             raise RuntimeError("Request context not initialized.")
-    def fset(self, value): setattr(_lctx, name, value)
-    def fdel(self): delattr(_lctx, name)
-    return property(fget, fset, fdel,
-        'Thread-local property stored in :data:`_lctx.%s`' % name)
+    def fset(self, value): ls.var = value
+    def fdel(self): del ls.var
+    return property(fget, fset, fdel, 'Thread-local property')
 
 
 class LocalRequest(BaseRequest):
     ''' A thread-local subclass of :class:`BaseRequest` with a different
-        set of attribues for each thread. There is usually only one global
+        set of attributes for each thread. There is usually only one global
         instance of this class (:data:`request`). If accessed during a
         request/response cycle, this instance always refers to the *current*
         request (even on a multithreaded server). '''
     bind = BaseRequest.__init__
-    environ = local_property('request_environ')
+    environ = local_property()
 
 
 class LocalResponse(BaseResponse):
     ''' A thread-local subclass of :class:`BaseResponse` with a different
-        set of attribues for each thread. There is usually only one global
+        set of attributes for each thread. There is usually only one global
         instance of this class (:data:`response`). Its attributes are used
         to build the HTTP response at the end of the request/response cycle.
     '''
     bind = BaseResponse.__init__
-    _status_line = local_property('response_status_line')
-    _status_code = local_property('response_status_code')
-    _cookies     = local_property('response_cookies')
-    _headers     = local_property('response_headers')
-    body         = local_property('response_body')
-
-Response = LocalResponse # BC 0.9
-Request  = LocalRequest  # BC 0.9
-
+    _status_line = local_property()
+    _status_code = local_property()
+    _cookies     = local_property()
+    _headers     = local_property()
+    body         = local_property()
+
+
+Request = BaseRequest
+Response = BaseResponse
+
+
+class HTTPResponse(Response, BottleException):
+    def __init__(self, body='', status=None, headers=None, **more_headers):
+        super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
+
+    def apply(self, response):
+        response._status_code = self._status_code
+        response._status_line = self._status_line
+        response._headers = self._headers
+        response._cookies = self._cookies
+        response.body = self.body
+
+
+class HTTPError(HTTPResponse):
+    default_status = 500
+    def __init__(self, status=None, body=None, exception=None, traceback=None,
+                 **options):
+        self.exception = exception
+        self.traceback = traceback
+        super(HTTPError, self).__init__(body, status, **options)
 
 
 
@@ -1541,6 +1716,7 @@
 
 class PluginError(BottleException): pass
 
+
 class JSONPlugin(object):
     name = 'json'
     api  = 2
@@ -1548,63 +1724,26 @@
     def __init__(self, json_dumps=json_dumps):
         self.json_dumps = json_dumps
 
-    def apply(self, callback, context):
+    def apply(self, callback, route):
         dumps = self.json_dumps
         if not dumps: return callback
         def wrapper(*a, **ka):
-            rv = callback(*a, **ka)
+            try:
+                rv = callback(*a, **ka)
+            except HTTPError:
+                rv = _e()
+
             if isinstance(rv, dict):
                 #Attempt to serialize, raises exception on failure
                 json_response = dumps(rv)
                 #Set content type only if serialization succesful
                 response.content_type = 'application/json'
                 return json_response
+            elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
+                rv.body = dumps(rv.body)
+                rv.content_type = 'application/json'
             return rv
-        return wrapper
-
-
-class HooksPlugin(object):
-    name = 'hooks'
-    api  = 2
-
-    _names = 'before_request', 'after_request', 'app_reset'
-
-    def __init__(self):
-        self.hooks = dict((name, []) for name in self._names)
-        self.app = None
-
-    def _empty(self):
-        return not (self.hooks['before_request'] or self.hooks['after_request'])
-
-    def setup(self, app):
-        self.app = app
-
-    def add(self, name, func):
-        ''' Attach a callback to a hook. '''
-        was_empty = self._empty()
-        self.hooks.setdefault(name, []).append(func)
-        if self.app and was_empty and not self._empty(): self.app.reset()
-
-    def remove(self, name, func):
-        ''' Remove a callback from a hook. '''
-        was_empty = self._empty()
-        if name in self.hooks and func in self.hooks[name]:
-            self.hooks[name].remove(func)
-        if self.app and not was_empty and self._empty(): self.app.reset()
-
-    def trigger(self, name, *a, **ka):
-        ''' Trigger a hook and return a list of results. '''
-        hooks = self.hooks[name]
-        if ka.pop('reversed', False): hooks = hooks[::-1]
-        return [hook(*a, **ka) for hook in hooks]
-
-    def apply(self, callback, context):
-        if self._empty(): return callback
-        def wrapper(*a, **ka):
-            self.trigger('before_request')
-            rv = callback(*a, **ka)
-            self.trigger('after_request', reversed=True)
-            return rv
+
         return wrapper
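
For illustration (not from the patch), what the reworked plugin enables; the routes are made up::

    from bottle import Bottle, HTTPResponse

    app = Bottle()

    @app.route('/info')
    def info():
        return {'version': 2}     # dict return values are serialized as JSON

    @app.route('/teapot')
    def teapot():
        # dict bodies wrapped in an HTTPResponse are now serialized as well
        return HTTPResponse({'error': 'short and stout'}, 418)
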
 
 
@@ -1620,9 +1759,6 @@
         conf = route.config.get('template')
         if isinstance(conf, (tuple, list)) and len(conf) == 2:
             return view(conf[0], **conf[1])(callback)
-        elif isinstance(conf, str) and 'template_opts' in route.config:
-            depr('The `template_opts` parameter is deprecated.') #0.9
-            return view(conf, **route.config['template_opts'])(callback)
         elif isinstance(conf, str):
             return view(conf)(callback)
         else:
@@ -1642,13 +1778,13 @@
 
     def find_module(self, fullname, path=None):
         if '.' not in fullname: return
-        packname, modname = fullname.rsplit('.', 1)
+        packname = fullname.rsplit('.', 1)[0]
         if packname != self.name: return
         return self
 
     def load_module(self, fullname):
         if fullname in sys.modules: return sys.modules[fullname]
-        packname, modname = fullname.rsplit('.', 1)
+        modname = fullname.rsplit('.', 1)[1]
         realname = self.impmask % modname
         __import__(realname)
         module = sys.modules[fullname] = sys.modules[realname]
@@ -1739,7 +1875,6 @@
     getlist = getall
 
 
-
 class FormsDict(MultiDict):
     ''' This :class:`MultiDict` subclass is used to store request form data.
         Additionally to the normal dict-like item access methods (which return
@@ -1756,10 +1891,11 @@
 
     def _fix(self, s, encoding=None):
         if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
-            s = s.encode('latin1')
-        if isinstance(s, bytes): # Python 2 WSGI
+            return s.encode('latin1').decode(encoding or self.input_encoding)
+        elif isinstance(s, bytes): # Python 2 WSGI
             return s.decode(encoding or self.input_encoding)
-        return s
+        else:
+            return s
 
     def decode(self, encoding=None):
         ''' Returns a copy with all keys and values de- or recoded to match
@@ -1773,6 +1909,7 @@
         return copy
 
     def getunicode(self, name, default=None, encoding=None):
+        ''' Return the value as a unicode string, or the default. '''
         try:
             return self._fix(self[name], encoding)
         except (UnicodeError, KeyError):
@@ -1820,7 +1957,7 @@
         Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
         that uses non-native strings.)
     '''
-    #: List of keys that do not have a 'HTTP_' prefix.
+    #: List of keys that do not have a ``HTTP_`` prefix.
     cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')
 
     def __init__(self, environ):
@@ -1858,39 +1995,212 @@
     def __contains__(self, key): return self._ekey(key) in self.environ
 
 
+
 class ConfigDict(dict):
-    ''' A dict-subclass with some extras: You can access keys like attributes.
-        Uppercase attributes create new ConfigDicts and act as name-spaces.
-        Other missing attributes return None. Calling a ConfigDict updates its
-        values and returns itself.
-
-        >>> cfg = ConfigDict()
-        >>> cfg.Namespace.value = 5
-        >>> cfg.OtherNamespace(a=1, b=2)
-        >>> cfg
-        {'Namespace': {'value': 5}, 'OtherNamespace': {'a': 1, 'b': 2}}
+    ''' A dict-like configuration storage with additional support for
+        namespaces, validators, meta-data, on_change listeners and more.
+
+        This storage is optimized for fast read access. Retrieving a key
+        or using non-altering dict methods (e.g. `dict.get()`) has no overhead
+        compared to a native dict.
     '''
-
+    __slots__ = ('_meta', '_on_change')
+
+    class Namespace(DictMixin):
+
+        def __init__(self, config, namespace):
+            self._config = config
+            self._prefix = namespace
+
+        def __getitem__(self, key):
+            depr('Accessing namespaces as dicts is discouraged. '
+                 'Only use flat item access: '
+                 'cfg["names"]["pace"]["key"] -> cfg["name.space.key"]') #0.12
+            return self._config[self._prefix + '.' + key]
+
+        def __setitem__(self, key, value):
+            self._config[self._prefix + '.' + key] = value
+
+        def __delitem__(self, key):
+            del self._config[self._prefix + '.' + key]
+
+        def __iter__(self):
+            ns_prefix = self._prefix + '.'
+            for key in self._config:
+                ns, dot, name = key.rpartition('.')
+                if ns == self._prefix and name:
+                    yield name
+
+        def keys(self): return [x for x in self]
+        def __len__(self): return len(self.keys())
+        def __contains__(self, key): return self._prefix + '.' + key in self._config
+        def __repr__(self): return '<Config.Namespace %s.*>' % self._prefix
+        def __str__(self): return '<Config.Namespace %s.*>' % self._prefix
+
+        # Deprecated ConfigDict features
+        def __getattr__(self, key):
+            depr('Attribute access is deprecated.') #0.12
+            if key not in self and key[0].isupper():
+                self[key] = ConfigDict.Namespace(self._config, self._prefix + '.' + key)
+            if key not in self and key.startswith('__'):
+                raise AttributeError(key)
+            return self.get(key)
+
+        def __setattr__(self, key, value):
+            if key in ('_config', '_prefix'):
+                self.__dict__[key] = value
+                return
+            depr('Attribute assignment is deprecated.') #0.12
+            if hasattr(DictMixin, key):
+                raise AttributeError('Read-only attribute.')
+            if key in self and self[key] and isinstance(self[key], self.__class__):
+                raise AttributeError('Non-empty namespace attribute.')
+            self[key] = value
+
+        def __delattr__(self, key):
+            if key in self:
+                val = self.pop(key)
+                if isinstance(val, self.__class__):
+                    prefix = key + '.'
+                    for key in self:
+                        if key.startswith(prefix):
+                            del self[prefix+key]
+
+        def __call__(self, *a, **ka):
+            depr('Calling ConfigDict is deprecated. Use the update() method.') #0.12
+            self.update(*a, **ka)
+            return self
+
+    def __init__(self, *a, **ka):
+        self._meta = {}
+        self._on_change = lambda name, value: None
+        if a or ka:
+            depr('Constructor no longer accepts parameters.') #0.12
+            self.update(*a, **ka)
+
+    def load_config(self, filename):
+        ''' Load values from an *.ini style config file.
+
+            If the config file contains sections, their names are used as
+            namespaces for the values within. The two special sections
+            ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
+        '''
+        conf = ConfigParser()
+        conf.read(filename)
+        for section in conf.sections():
+            for key, value in conf.items(section):
+                if section not in ('DEFAULT', 'bottle'):
+                    key = section + '.' + key
+                self[key] = value
+        return self
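
For illustration only (not from the patch), a sketch of how load_config() flattens an .ini file into dotted keys; the file name and contents are hypothetical::

    # contents of a hypothetical myapp.conf:
    #
    #   [bottle]
    #   debug = true
    #
    #   [sqlite]
    #   db = /tmp/test.db

    from bottle import ConfigDict

    c = ConfigDict().load_config('myapp.conf')
    # c['debug']     == 'true'          (the 'bottle' section maps to the root)
    # c['sqlite.db'] == '/tmp/test.db'  (other sections become namespaces)
    # note that ConfigParser always hands back string values
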
+
+    def load_dict(self, source, namespace='', make_namespaces=False):
+        ''' Import values from a dictionary structure. Nesting can be used to
+            represent namespaces.
+
+            >>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}})
+            {'name.space.key': 'value'}
+        '''
+        stack = [(namespace, source)]
+        while stack:
+            prefix, source = stack.pop()
+            if not isinstance(source, dict):
+                raise TypeError('Source is not a dict (%r)' % type(source))
+            for key, value in source.items():
+                if not isinstance(key, str):
+                    raise TypeError('Key is not a string (%r)' % type(key))
+                full_key = prefix + '.' + key if prefix else key
+                if isinstance(value, dict):
+                    stack.append((full_key, value))
+                    if make_namespaces:
+                        self[full_key] = self.Namespace(self, full_key)
+                else:
+                    self[full_key] = value
+        return self
+
+    def update(self, *a, **ka):
+        ''' If the first parameter is a string, all keys are prefixed with this
+            namespace. Apart from that it works just as the usual dict.update().
+            Example: ``update('some.namespace', key='value')`` '''
+        prefix = ''
+        if a and isinstance(a[0], str):
+            prefix = a[0].strip('.') + '.'
+            a = a[1:]
+        for key, value in dict(*a, **ka).items():
+            self[prefix+key] = value
+
+    def setdefault(self, key, value):
+        if key not in self:
+            self[key] = value
+        return self[key]
+
+    def __setitem__(self, key, value):
+        if not isinstance(key, str):
+            raise TypeError('Key has type %r (not a string)' % type(key))
+
+        value = self.meta_get(key, 'filter', lambda x: x)(value)
+        if key in self and self[key] is value:
+            return
+        self._on_change(key, value)
+        dict.__setitem__(self, key, value)
+
+    def __delitem__(self, key):
+        dict.__delitem__(self, key)
+
+    def clear(self):
+        for key in self:
+            del self[key]
+
+    def meta_get(self, key, metafield, default=None):
+        ''' Return the value of a meta field for a key. '''
+        return self._meta.get(key, {}).get(metafield, default)
+
+    def meta_set(self, key, metafield, value):
+        ''' Set the meta field for a key to a new value. This triggers the
+            on-change handler for existing keys. '''
+        self._meta.setdefault(key, {})[metafield] = value
+        if key in self:
+            self[key] = self[key]
+
+    def meta_list(self, key):
+        ''' Return an iterable of meta field names defined for a key. '''
+        return self._meta.get(key, {}).keys()
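
A small sketch (illustrative, not from the patch) of the 'filter' meta field that __setitem__ above consults::

    from bottle import ConfigDict

    c = ConfigDict()
    c.meta_set('port', 'filter', int)   # future assignments run through int()
    c['port'] = '8080'
    assert c['port'] == 8080
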
+
+    # Deprecated ConfigDict features
     def __getattr__(self, key):
+        depr('Attribute access is deprecated.') #0.12
         if key not in self and key[0].isupper():
-            self[key] = ConfigDict()
+            self[key] = self.Namespace(self, key)
+        if key not in self and key.startswith('__'):
+            raise AttributeError(key)
         return self.get(key)
 
     def __setattr__(self, key, value):
+        if key in self.__slots__:
+            return dict.__setattr__(self, key, value)
+        depr('Attribute assignment is deprecated.') #0.12
         if hasattr(dict, key):
             raise AttributeError('Read-only attribute.')
-        if key in self and self[key] and isinstance(self[key], ConfigDict):
+        if key in self and self[key] and isinstance(self[key], self.Namespace):
             raise AttributeError('Non-empty namespace attribute.')
         self[key] = value
 
     def __delattr__(self, key):
-        if key in self: del self[key]
+        if key in self:
+            val = self.pop(key)
+            if isinstance(val, self.Namespace):
+                prefix = key + '.'
+                for key in self:
+                    if key.startswith(prefix):
+                        del self[prefix+key]
 
     def __call__(self, *a, **ka):
-        for key, value in dict(*a, **ka).items(): setattr(self, key, value)
+        depr('Calling ConfigDict is deprecated. Use the update() method.') #0.12
+        self.update(*a, **ka)
         return self
 
 
+
 class AppStack(list):
     """ A stack-like list. Calling it returns the head of the stack. """
 
@@ -1921,11 +2231,27 @@
             yield part
 
 
+class _closeiter(object):
+    ''' This only exists to be able to attach a .close method to iterators that
+        do not support attribute assignment (most of itertools). '''
+
+    def __init__(self, iterator, close=None):
+        self.iterator = iterator
+        self.close_callbacks = makelist(close)
+
+    def __iter__(self):
+        return iter(self.iterator)
+
+    def close(self):
+        for func in self.close_callbacks:
+            func()
+
+
 class ResourceManager(object):
     ''' This class manages a list of search paths and helps to find and open
-        aplication-bound resources (files).
-
-        :param base: default value for same-named :meth:`add_path` parameter.
+        application-bound resources (files).
+
+        :param base: default value for :meth:`add_path` calls.
         :param opener: callable used to open resources.
         :param cachemode: controls which lookups are cached. One of 'all',
                          'found' or 'none'.
@@ -1938,25 +2264,24 @@
 
         #: A list of search paths. See :meth:`add_path` for details.
         self.path = []
-        #: A cache for resolved paths. `res.cache.clear()`` clears the cache.
+        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
         self.cache = {}
 
     def add_path(self, path, base=None, index=None, create=False):
-        ''' Add a new path to the list of search paths. Return False if it does
-            not exist.
-
-            :param path: The new search path. Relative paths are turned into an
-                absolute and normalized form. If the path looks like a file (not
-                ending in `/`), the filename is stripped off.
+        ''' Add a new path to the list of search paths. Return False if the
+            path does not exist.
+
+            :param path: The new search path. Relative paths are turned into
+                an absolute and normalized form. If the path looks like a file
+                (not ending in `/`), the filename is stripped off.
             :param base: Path used to absolutize relative search paths.
-                Defaults to `:attr:base` which defaults to ``./``.
-            :param index: Position within the list of search paths. Defaults to
-                last index (appends to the list).
-            :param create: Create non-existent search paths. Off by default.
+                Defaults to :attr:`base` which defaults to ``os.getcwd()``.
+            :param index: Position within the list of search paths. Defaults
+                to last index (appends to the list).
+            :param create: Create non-existent search paths. Off by default.
 
             The `base` parameter makes it easy to reference files installed
             along with a python module or package::
-            
+
                 res.add_path('./resources/', __file__)
         '''
         base = os.path.abspath(os.path.dirname(base or self.base))
@@ -1965,12 +2290,13 @@
         if path in self.path:
             self.path.remove(path)
         if create and not os.path.isdir(path):
-            os.mkdirs(path)
+            os.makedirs(path)
         if index is None:
             self.path.append(path)
         else:
             self.path.insert(index, path)
         self.cache.clear()
+        return os.path.exists(path)
 
     def __iter__(self):
         ''' Iterate over all existing files in all registered paths. '''
@@ -2004,7 +2330,70 @@
         ''' Find a resource and return a file object, or raise IOError. '''
         fname = self.lookup(name)
         if not fname: raise IOError("Resource %r not found." % name)
-        return self.opener(name, mode=mode, *args, **kwargs)
+        return self.opener(fname, mode=mode, *args, **kwargs)
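
A usage sketch of ResourceManager (illustrative, not from the patch; the paths and file names are hypothetical)::

    from bottle import ResourceManager

    res = ResourceManager()
    # resolve './data/' relative to the directory of this module
    res.add_path('./data/', base=__file__)
    fname = res.lookup('defaults.json')       # absolute path or None
    if fname:
        with res.open('defaults.json') as fp:
            payload = fp.read()
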
+
+
+class FileUpload(object):
+
+    def __init__(self, fileobj, name, filename, headers=None):
+        ''' Wrapper for file uploads. '''
+        #: Open file(-like) object (BytesIO buffer or temporary file)
+        self.file = fileobj
+        #: Name of the upload form field
+        self.name = name
+        #: Raw filename as sent by the client (may contain unsafe characters)
+        self.raw_filename = filename
+        #: A :class:`HeaderDict` with additional headers (e.g. content-type)
+        self.headers = HeaderDict(headers) if headers else HeaderDict()
+
+    content_type = HeaderProperty('Content-Type')
+    content_length = HeaderProperty('Content-Length', reader=int, default=-1)
+
+    @cached_property
+    def filename(self):
+        ''' Name of the file on the client file system, but normalized to ensure
+            file system compatibility. An empty filename is returned as 'empty'.
+
+            Only ASCII letters, digits, dashes, underscores and dots are
+            allowed in the final filename. Accents are removed, if possible.
+            Whitespace is replaced by a single dash. Leading or trailing dots
+            or dashes are removed. The filename is limited to 255 characters.
+        '''
+        fname = self.raw_filename
+        if not isinstance(fname, unicode):
+            fname = fname.decode('utf8', 'ignore')
+        fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
+        fname = os.path.basename(fname.replace('\\', os.path.sep))
+        fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
+        fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
+        return fname[:255] or 'empty'
+
+    def _copy_file(self, fp, chunk_size=2**16):
+        read, write, offset = self.file.read, fp.write, self.file.tell()
+        while 1:
+            buf = read(chunk_size)
+            if not buf: break
+            write(buf)
+        self.file.seek(offset)
+
+    def save(self, destination, overwrite=False, chunk_size=2**16):
+        ''' Save file to disk or copy its content to an open file(-like) object.
+            If *destination* is a directory, :attr:`filename` is added to the
+            path. Existing files are not overwritten by default (IOError).
+
+            :param destination: File path, directory or file(-like) object.
+            :param overwrite: If True, replace existing files. (default: False)
+            :param chunk_size: Bytes to read at a time. (default: 64kb)
+        '''
+        if isinstance(destination, basestring): # Except file-likes here
+            if os.path.isdir(destination):
+                destination = os.path.join(destination, self.filename)
+            if not overwrite and os.path.exists(destination):
+                raise IOError('File exists.')
+            with open(destination, 'wb') as fp:
+                self._copy_file(fp, chunk_size)
+        else:
+            self._copy_file(destination, chunk_size)
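
A usage sketch of FileUpload.save() in a handler (illustrative, not from the patch; the form field name and target directory are hypothetical)::

    from bottle import Bottle, request

    app = Bottle()

    @app.post('/upload')
    def do_upload():
        upload = request.files.get('data')     # a FileUpload instance, or None
        if upload is None:
            return 'no file named "data" in the request'
        # '/tmp/uploads' is a placeholder directory assumed to exist; the
        # sanitized `filename` attribute is appended automatically
        upload.save('/tmp/uploads')
        return 'saved as %s' % upload.filename
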
 
 
 
@@ -2016,7 +2405,7 @@
 ###############################################################################
 
 
-def abort(code=500, text='Unknown Error: Application stopped.'):
+def abort(code=500, text='Unknown Error.'):
     """ Aborts execution and causes a HTTP error. """
     raise HTTPError(code, text)
 
@@ -2024,10 +2413,13 @@
 def redirect(url, code=None):
     """ Aborts execution and causes a 303 or 302 redirect, depending on
         the HTTP protocol version. """
-    if code is None:
+    if not code:
         code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
-    location = urljoin(request.url, url)
-    raise HTTPResponse("", status=code, header=dict(Location=location))
+    res = response.copy(cls=HTTPResponse)
+    res.status = code
+    res.body = ""
+    res.set_header('Location', urljoin(request.url, url))
+    raise res
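
Minimal redirect() usage (illustrative, not from the patch)::

    from bottle import Bottle, redirect

    app = Bottle()

    @app.route('/old')
    def old_location():
        redirect('/new')    # raises the prepared HTTPResponse (303 or 302)
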
 
 
 def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
@@ -2040,15 +2432,29 @@
         yield part
 
 
-def static_file(filename, root, mimetype='auto', download=False):
+def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
     """ Open a file in a safe way and return :exc:`HTTPResponse` with status
-        code 200, 305, 401 or 404. Set Content-Type, Content-Encoding,
-        Content-Length and Last-Modified header. Obey If-Modified-Since header
-        and HEAD requests.
+        code 200, 304, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
+        ``Content-Length`` and ``Last-Modified`` headers are set if possible.
+        Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
+        requests.
+
+        :param filename: Name or path of the file to send.
+        :param root: Root path for file lookups. Should be an absolute directory
+            path.
+        :param mimetype: Defines the content-type header (default: guess from
+            file extension)
+        :param download: If True, ask the browser to open a `Save as...` dialog
+            instead of opening the file with the associated program. You can
+            specify a custom filename as a string. If not specified, the
+            original filename is used (default: False).
+        :param charset: The charset to use for files with a ``text/*``
+            mime-type. (default: UTF-8)
     """
+
     root = os.path.abspath(root) + os.sep
     filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
-    header = dict()
+    headers = dict()
 
     if not filename.startswith(root):
         return HTTPError(403, "Access denied.")
@@ -2059,41 +2465,43 @@
 
     if mimetype == 'auto':
         mimetype, encoding = mimetypes.guess_type(filename)
-        if mimetype: header['Content-Type'] = mimetype
-        if encoding: header['Content-Encoding'] = encoding
-    elif mimetype:
-        header['Content-Type'] = mimetype
+        if encoding: headers['Content-Encoding'] = encoding
+
+    if mimetype:
+        if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
+            mimetype += '; charset=%s' % charset
+        headers['Content-Type'] = mimetype
 
     if download:
         download = os.path.basename(filename if download == True else download)
-        header['Content-Disposition'] = 'attachment; filename="%s"' % download
+        headers['Content-Disposition'] = 'attachment; filename="%s"' % download
 
     stats = os.stat(filename)
-    header['Content-Length'] = clen = stats.st_size
+    headers['Content-Length'] = clen = stats.st_size
     lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
-    header['Last-Modified'] = lm
+    headers['Last-Modified'] = lm
 
     ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
     if ims:
         ims = parse_date(ims.split(";")[0].strip())
     if ims is not None and ims >= int(stats.st_mtime):
-        header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
-        return HTTPResponse(status=304, header=header)
+        headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+        return HTTPResponse(status=304, **headers)
 
     body = '' if request.method == 'HEAD' else open(filename, 'rb')
 
-    header["Accept-Ranges"] = "bytes"
+    headers["Accept-Ranges"] = "bytes"
     ranges = request.environ.get('HTTP_RANGE')
     if 'HTTP_RANGE' in request.environ:
         ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
         if not ranges:
             return HTTPError(416, "Requested Range Not Satisfiable")
         offset, end = ranges[0]
-        header["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
-        header["Content-Length"] = str(end-offset)
+        headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
+        headers["Content-Length"] = str(end-offset)
         if body: body = _file_iter_range(body, offset, end-offset)
-        return HTTPResponse(body, header=header, status=206)
-    return HTTPResponse(body, header=header)
+        return HTTPResponse(body, status=206, **headers)
+    return HTTPResponse(body, **headers)
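
Typical static_file() usage (illustrative, not from the patch; the root directories are placeholders)::

    from bottle import Bottle, static_file

    app = Bottle()

    @app.route('/static/<filepath:path>')
    def serve_static(filepath):
        return static_file(filepath, root='/var/www/static')

    @app.route('/download/<name>')
    def download_file(name):
        return static_file(name, root='/var/www/files', download=True)
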
 
 
 
@@ -2109,8 +2517,17 @@
     """ Change the debug level.
     There is only one debug level supported at the moment."""
     global DEBUG
+    if mode: warnings.simplefilter('default')
     DEBUG = bool(mode)
 
+def http_date(value):
+    if isinstance(value, (datedate, datetime)):
+        value = value.utctimetuple()
+    elif isinstance(value, (int, float)):
+        value = time.gmtime(value)
+    if not isinstance(value, basestring):
+        value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
+    return value
 
 def parse_date(ims):
     """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
@@ -2120,7 +2537,6 @@
     except (TypeError, ValueError, IndexError, OverflowError):
         return None
 
-
 def parse_auth(header):
     """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
     try:
@@ -2149,6 +2565,17 @@
         except ValueError:
             pass
 
+def _parse_qsl(qs):
+    r = []
+    for pair in qs.replace(';','&').split('&'):
+        if not pair: continue
+        nv = pair.split('=', 1)
+        if len(nv) != 2: nv.append('')
+        key = urlunquote(nv[0].replace('+', ' '))
+        value = urlunquote(nv[1].replace('+', ' '))
+        r.append((key, value))
+    return r
+
 def _lscmp(a, b):
     ''' Compares two strings in a cryptographically safe way:
         Runtime is not affected by length of common prefix. '''
@@ -2185,7 +2612,7 @@
 
 def html_quote(string):
     ''' Escape and quote a string to be used as an HTTP attribute.'''
-    return '"%s"' % html_escape(string).replace('\n','%#10;')\
+    return '"%s"' % html_escape(string).replace('\n','&#10;')\
                     .replace('\r','&#13;').replace('\t','&#9;')
 
 
@@ -2195,18 +2622,17 @@
     takes optional keyword arguments. The output is best described by example::
 
         a()         -> '/a'
-        b(x, y)     -> '/b/:x/:y'
-        c(x, y=5)   -> '/c/:x' and '/c/:x/:y'
-        d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
+        b(x, y)     -> '/b/<x>/<y>'
+        c(x, y=5)   -> '/c/<x>' and '/c/<x>/<y>'
+        d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
     """
-    import inspect # Expensive module. Only import if necessary.
     path = '/' + func.__name__.replace('__','/').lstrip('/')
-    spec = inspect.getargspec(func)
+    spec = getargspec(func)
     argc = len(spec[0]) - len(spec[3] or [])
-    path += ('/:%s' * argc) % tuple(spec[0][:argc])
+    path += ('/<%s>' * argc) % tuple(spec[0][:argc])
     yield path
     for arg in spec[0][argc:]:
-        path += '/:%s' % arg
+        path += '/<%s>' % arg
         yield path
 
 
@@ -2241,38 +2667,18 @@
     return new_script_name, new_path_info
 
 
-def validate(**vkargs):
-    """
-    Validates and manipulates keyword arguments by user defined callables.
-    Handles ValueError and missing arguments by raising HTTPError(403).
-    """
-    depr('Use route wildcard filters instead.')
-    def decorator(func):
-        @functools.wraps(func)
-        def wrapper(*args, **kargs):
-            for key, value in vkargs.items():
-                if key not in kargs:
-                    abort(403, 'Missing parameter: %s' % key)
-                try:
-                    kargs[key] = value(kargs[key])
-                except ValueError:
-                    abort(403, 'Wrong parameter format for: %s' % key)
-            return func(*args, **kargs)
-        return wrapper
-    return decorator
-
-
 def auth_basic(check, realm="private", text="Access denied"):
     ''' Callback decorator to require HTTP auth (basic).
         TODO: Add route(check_auth=...) parameter. '''
     def decorator(func):
-      def wrapper(*a, **ka):
-        user, password = request.auth or (None, None)
-        if user is None or not check(user, password):
-          response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm
-          return HTTPError(401, text)
-        return func(*a, **ka)
-      return wrapper
+        def wrapper(*a, **ka):
+            user, password = request.auth or (None, None)
+            if user is None or not check(user, password):
+                err = HTTPError(401, text)
+                err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
+                return err
+            return func(*a, **ka)
+        return wrapper
     return decorator
 
 
@@ -2311,8 +2717,8 @@
 
 class ServerAdapter(object):
     quiet = False
-    def __init__(self, host='127.0.0.1', port=8080, **config):
-        self.options = config
+    def __init__(self, host='127.0.0.1', port=8080, **options):
+        self.options = options
         self.host = host
         self.port = int(port)
 
@@ -2342,20 +2748,49 @@
 
 
 class WSGIRefServer(ServerAdapter):
-    def run(self, handler): # pragma: no cover
-        from wsgiref.simple_server import make_server, WSGIRequestHandler
-        if self.quiet:
-            class QuietHandler(WSGIRequestHandler):
-                def log_request(*args, **kw): pass
-            self.options['handler_class'] = QuietHandler
-        srv = make_server(self.host, self.port, handler, **self.options)
+    def run(self, app): # pragma: no cover
+        from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
+        from wsgiref.simple_server import make_server
+        import socket
+
+        class FixedHandler(WSGIRequestHandler):
+            def address_string(self): # Prevent reverse DNS lookups please.
+                return self.client_address[0]
+            def log_request(*args, **kw):
+                if not self.quiet:
+                    return WSGIRequestHandler.log_request(*args, **kw)
+
+        handler_cls = self.options.get('handler_class', FixedHandler)
+        server_cls  = self.options.get('server_class', WSGIServer)
+
+        if ':' in self.host: # Fix wsgiref for IPv6 addresses.
+            if getattr(server_cls, 'address_family') == socket.AF_INET:
+                class server_cls(server_cls):
+                    address_family = socket.AF_INET6
+
+        srv = make_server(self.host, self.port, app, server_cls, handler_cls)
         srv.serve_forever()
 
 
 class CherryPyServer(ServerAdapter):
     def run(self, handler): # pragma: no cover
         from cherrypy import wsgiserver
-        server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
+        self.options['bind_addr'] = (self.host, self.port)
+        self.options['wsgi_app'] = handler
+
+        certfile = self.options.get('certfile')
+        if certfile:
+            del self.options['certfile']
+        keyfile = self.options.get('keyfile')
+        if keyfile:
+            del self.options['keyfile']
+
+        server = wsgiserver.CherryPyWSGIServer(**self.options)
+        if certfile:
+            server.ssl_certificate = certfile
+        if keyfile:
+            server.ssl_private_key = keyfile
+
         try:
             server.start()
         finally:
@@ -2371,9 +2806,8 @@
 class PasteServer(ServerAdapter):
     def run(self, handler): # pragma: no cover
         from paste import httpserver
-        if not self.quiet:
-            from paste.translogger import TransLogger
-            handler = TransLogger(handler)
+        from paste.translogger import TransLogger
+        handler = TransLogger(handler, setup_console_handler=(not self.quiet))
         httpserver.serve(handler, host=self.host, port=str(self.port),
                          **self.options)
 
@@ -2413,7 +2847,7 @@
         import tornado.wsgi, tornado.httpserver, tornado.ioloop
         container = tornado.wsgi.WSGIContainer(handler)
         server = tornado.httpserver.HTTPServer(container)
-        server.listen(port=self.port)
+        server.listen(port=self.port,address=self.host)
         tornado.ioloop.IOLoop.instance().start()
 
 
@@ -2455,17 +2889,30 @@
 class GeventServer(ServerAdapter):
     """ Untested. Options:
 
-        * `monkey` (default: True) fixes the stdlib to use greenthreads.
         * `fast` (default: False) uses libevent's http server, but has some
           issues: No streaming, no pipelining, no SSL.
+        * See gevent.wsgi.WSGIServer() documentation for more options.
     """
     def run(self, handler):
-        from gevent import wsgi as wsgi_fast, pywsgi, monkey, local
-        if self.options.get('monkey', True):
-            if not threading.local is local.local: monkey.patch_all()
-        wsgi = wsgi_fast if self.options.get('fast') else pywsgi
-        log = None if self.quiet else 'default'
-        wsgi.WSGIServer((self.host, self.port), handler, log=log).serve_forever()
+        from gevent import wsgi, pywsgi, local
+        if not isinstance(threading.local(), local.local):
+            msg = "Bottle requires gevent.monkey.patch_all() (before import)"
+            raise RuntimeError(msg)
+        if not self.options.pop('fast', None): wsgi = pywsgi
+        self.options['log'] = None if self.quiet else 'default'
+        address = (self.host, self.port)
+        server = wsgi.WSGIServer(address, handler, **self.options)
+        if 'BOTTLE_CHILD' in os.environ:
+            import signal
+            signal.signal(signal.SIGINT, lambda s, f: server.stop())
+        server.serve_forever()
+
+
+class GeventSocketIOServer(ServerAdapter):
+    def run(self,handler):
+        from socketio import server
+        address = (self.host, self.port)
+        server.SocketIOServer(address, handler, **self.options).serve_forever()
 
 
 class GunicornServer(ServerAdapter):
@@ -2539,6 +2986,7 @@
     'gunicorn': GunicornServer,
     'eventlet': EventletServer,
     'gevent': GeventServer,
+    'geventSocketIO':GeventSocketIOServer,
     'rocket': RocketServer,
     'bjoern' : BjoernServer,
     'auto': AutoServer,
@@ -2590,7 +3038,7 @@
 _debug = debug
 def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
         interval=1, reloader=False, quiet=False, plugins=None,
-        debug=False, **kargs):
+        debug=None, **kargs):
     """ Start a server instance. This method blocks until the server terminates.
 
         :param app: WSGI application or target string supported by
@@ -2633,7 +3081,7 @@
         return
 
     try:
-        _debug(debug)
+        if debug is not None: _debug(debug)
         app = app or default_app()
         if isinstance(app, basestring):
             app = load_app(app)
@@ -2770,11 +3218,19 @@
     def search(cls, name, lookup=[]):
         """ Search name in all directories specified in lookup.
         First without, then with common extensions. Return first hit. """
-        if os.path.isfile(name): return name
+        if not lookup:
+            depr('The template lookup path list should not be empty.') #0.12
+            lookup = ['.']
+
+        if os.path.isabs(name) and os.path.isfile(name):
+            depr('Absolute template path names are deprecated.') #0.12
+            return os.path.abspath(name)
+
         for spath in lookup:
-            fname = os.path.join(spath, name)
-            if os.path.isfile(fname):
-                return fname
+            spath = os.path.abspath(spath) + os.sep
+            fname = os.path.abspath(os.path.join(spath, name))
+            if not fname.startswith(spath): continue
+            if os.path.isfile(fname): return fname
             for ext in cls.extensions:
                 if os.path.isfile('%s.%s' % (fname, ext)):
                     return '%s.%s' % (fname, ext)
@@ -2799,8 +3255,8 @@
         """ Render the template with the specified local variables and return
         a single byte or unicode string. If it is a byte string, the encoding
         must match self.encoding. This method must be thread-safe!
-        Local variables may be provided in dictionaries (*args)
-        or directly, as keywords (**kwargs).
+        Local variables may be provided in dictionaries (args)
+        or directly, as keywords (kwargs).
         """
         raise NotImplementedError
 
@@ -2845,7 +3301,7 @@
 
 
 class Jinja2Template(BaseTemplate):
-    def prepare(self, filters=None, tests=None, **kwargs):
+    def prepare(self, filters=None, tests=None, globals={}, **kwargs):
         from jinja2 import Environment, FunctionLoader
         if 'prefix' in kwargs: # TODO: to be removed after a while
             raise RuntimeError('The keyword argument `prefix` has been removed. '
@@ -2853,6 +3309,7 @@
         self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
         if filters: self.env.filters.update(filters)
         if tests: self.env.tests.update(tests)
+        if globals: self.env.globals.update(globals)
         if self.source:
             self.tpl = self.env.from_string(self.source)
         else:
@@ -2871,184 +3328,247 @@
             return f.read().decode(self.encoding)
 
 
-class SimpleTALTemplate(BaseTemplate):
-    ''' Deprecated, do not use. '''
-    def prepare(self, **options):
-        depr('The SimpleTAL template handler is deprecated'\
-             ' and will be removed in 0.12')
-        from simpletal import simpleTAL
-        if self.source:
-            self.tpl = simpleTAL.compileHTMLTemplate(self.source)
-        else:
-            with open(self.filename, 'rb') as fp:
-                self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read()))
-
-    def render(self, *args, **kwargs):
-        from simpletal import simpleTALES
-        for dictarg in args: kwargs.update(dictarg)
-        context = simpleTALES.Context()
-        for k,v in self.defaults.items():
-            context.addGlobal(k, v)
-        for k,v in kwargs.items():
-            context.addGlobal(k, v)
-        output = StringIO()
-        self.tpl.expand(context, output)
-        return output.getvalue()
-
-
 class SimpleTemplate(BaseTemplate):
-    blocks = ('if', 'elif', 'else', 'try', 'except', 'finally', 'for', 'while',
-              'with', 'def', 'class')
-    dedent_blocks = ('elif', 'else', 'except', 'finally')
-
-    @lazy_attribute
-    def re_pytokens(cls):
-        ''' This matches comments and all kinds of quoted strings but does
-            NOT match comments (#...) within quoted strings. (trust me) '''
-        return re.compile(r'''
-            (''(?!')|""(?!")|'{6}|"{6}    # Empty strings (all 4 types)
-             |'(?:[^\\']|\\.)+?'          # Single quotes (')
-             |"(?:[^\\"]|\\.)+?"          # Double quotes (")
-             |'{3}(?:[^\\]|\\.|\n)+?'{3}  # Triple-quoted strings (')
-             |"{3}(?:[^\\]|\\.|\n)+?"{3}  # Triple-quoted strings (")
-             |\#.*                        # Comments
-            )''', re.VERBOSE)
-
-    def prepare(self, escape_func=html_escape, noescape=False, **kwargs):
+
+    def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
         self.cache = {}
         enc = self.encoding
         self._str = lambda x: touni(x, enc)
         self._escape = lambda x: escape_func(touni(x, enc))
+        self.syntax = syntax
         if noescape:
             self._str, self._escape = self._escape, self._str
 
-    @classmethod
-    def split_comment(cls, code):
-        """ Removes comments (#...) from python code. """
-        if '#' not in code: return code
-        #: Remove comments only (leave quoted strings as they are)
-        subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0)
-        return re.sub(cls.re_pytokens, subf, code)
-
     @cached_property
     def co(self):
         return compile(self.code, self.filename or '<string>', 'exec')
 
     @cached_property
     def code(self):
-        stack = [] # Current Code indentation
-        lineno = 0 # Current line of code
-        ptrbuffer = [] # Buffer for printable strings and token tuple instances
-        codebuffer = [] # Buffer for generated python code
-        multiline = dedent = oneline = False
-        template = self.source or open(self.filename, 'rb').read()
-
-        def yield_tokens(line):
-            for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)):
-                if i % 2:
-                    if part.startswith('!'): yield 'RAW', part[1:]
-                    else: yield 'CMD', part
-                else: yield 'TXT', part
-
-        def flush(): # Flush the ptrbuffer
-            if not ptrbuffer: return
-            cline = ''
-            for line in ptrbuffer:
-                for token, value in line:
-                    if token == 'TXT': cline += repr(value)
-                    elif token == 'RAW': cline += '_str(%s)' % value
-                    elif token == 'CMD': cline += '_escape(%s)' % value
-                    cline +=  ', '
-                cline = cline[:-2] + '\\\n'
-            cline = cline[:-2]
-            if cline[:-1].endswith('\\\\\\\\\\n'):
-                cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr'
-            cline = '_printlist([' + cline + '])'
-            del ptrbuffer[:] # Do this before calling code() again
-            code(cline)
-
-        def code(stmt):
-            for line in stmt.splitlines():
-                codebuffer.append('  ' * len(stack) + line.strip())
-
-        for line in template.splitlines(True):
-            lineno += 1
-            line = touni(line, self.encoding)
-            sline = line.lstrip()
-            if lineno <= 2:
-                m = re.match(r"%\s*#.*coding[:=]\s*([-\w.]+)", sline)
-                if m: self.encoding = m.group(1)
-                if m: line = line.replace('coding','coding (removed)')
-            if sline and sline[0] == '%' and sline[:2] != '%%':
-                line = line.split('%',1)[1].lstrip() # Full line following the %
-                cline = self.split_comment(line).strip()
-                cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0]
-                flush() # You are actually reading this? Good luck, it's a mess :)
-                if cmd in self.blocks or multiline:
-                    cmd = multiline or cmd
-                    dedent = cmd in self.dedent_blocks # "else:"
-                    if dedent and not oneline and not multiline:
-                        cmd = stack.pop()
-                    code(line)
-                    oneline = not cline.endswith(':') # "if 1: pass"
-                    multiline = cmd if cline.endswith('\\') else False
-                    if not oneline and not multiline:
-                        stack.append(cmd)
-                elif cmd == 'end' and stack:
-                    code('#end(%s) %s' % (stack.pop(), line.strip()[3:]))
-                elif cmd == 'include':
-                    p = cline.split(None, 2)[1:]
-                    if len(p) == 2:
-                        code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1]))
-                    elif p:
-                        code("_=_include(%s, _stdout)" % repr(p[0]))
-                    else: # Empty %include -> reverse of %rebase
-                        code("_printlist(_base)")
-                elif cmd == 'rebase':
-                    p = cline.split(None, 2)[1:]
-                    if len(p) == 2:
-                        code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1]))
-                    elif p:
-                        code("globals()['_rebase']=(%s, {})" % repr(p[0]))
-                else:
-                    code(line)
-            else: # Line starting with text (not '%') or '%%' (escaped)
-                if line.strip().startswith('%%'):
-                    line = line.replace('%%', '%', 1)
-                ptrbuffer.append(yield_tokens(line))
-        flush()
-        return '\n'.join(codebuffer) + '\n'
-
-    def subtemplate(self, _name, _stdout, *args, **kwargs):
-        for dictarg in args: kwargs.update(dictarg)
+        source = self.source
+        if not source:
+            with open(self.filename, 'rb') as f:
+                source = f.read()
+        try:
+            source, encoding = touni(source), 'utf8'
+        except UnicodeError:
+            depr('Template encodings other than utf8 are no longer supported.') #0.11
+            source, encoding = touni(source, 'latin1'), 'latin1'
+        parser = StplParser(source, encoding=encoding, syntax=self.syntax)
+        code = parser.translate()
+        self.encoding = parser.encoding
+        return code
+
+    def _rebase(self, _env, _name=None, **kwargs):
+        if _name is None:
+            depr('Rebase function called without arguments.'
+                 ' You were probably looking for {{base}}?', True) #0.12
+        _env['_rebase'] = (_name, kwargs)
+
+    def _include(self, _env, _name=None, **kwargs):
+        if _name is None:
+            depr('Rebase function called without arguments.'
+                 ' You were probably looking for {{base}}?', True) #0.12
+        env = _env.copy()
+        env.update(kwargs)
         if _name not in self.cache:
             self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
-        return self.cache[_name].execute(_stdout, kwargs)
-
-    def execute(self, _stdout, *args, **kwargs):
-        for dictarg in args: kwargs.update(dictarg)
+        return self.cache[_name].execute(env['_stdout'], env)
+
+    def execute(self, _stdout, kwargs):
         env = self.defaults.copy()
+        env.update(kwargs)
         env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
-               '_include': self.subtemplate, '_str': self._str,
-               '_escape': self._escape, 'get': env.get,
-               'setdefault': env.setdefault, 'defined': env.__contains__})
-        env.update(kwargs)
+            'include': functools.partial(self._include, env),
+            'rebase': functools.partial(self._rebase, env), '_rebase': None,
+            '_str': self._str, '_escape': self._escape, 'get': env.get,
+            'setdefault': env.setdefault, 'defined': env.__contains__ })
         eval(self.co, env)
-        if '_rebase' in env:
-            subtpl, rargs = env['_rebase']
-            rargs['_base'] = _stdout[:] #copy stdout
+        if env.get('_rebase'):
+            subtpl, rargs = env.pop('_rebase')
+            rargs['base'] = ''.join(_stdout) #copy stdout
             del _stdout[:] # clear stdout
-            return self.subtemplate(subtpl,_stdout,rargs)
+            return self._include(env, subtpl, **rargs)
         return env
 
     def render(self, *args, **kwargs):
         """ Render the template using keyword arguments as local variables. """
-        for dictarg in args: kwargs.update(dictarg)
-        stdout = []
-        self.execute(stdout, kwargs)
+        env = {}; stdout = []
+        for dictarg in args: env.update(dictarg)
+        env.update(kwargs)
+        self.execute(stdout, env)
         return ''.join(stdout)
 
 
+class StplSyntaxError(TemplateError): pass
+
+
+class StplParser(object):
+    ''' Parser for stpl templates. '''
+    _re_cache = {} #: Cache for compiled re patterns
+    # This huge pile of voodoo magic splits python code into 8 different tokens.
+    # 1: All kinds of python strings (trust me, it works)
+    _re_tok = '((?m)[urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
+               '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
+               '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
+               '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
+    _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
+    # 2: Comments (until end of line, but not the newline itself)
+    _re_tok += '|(#.*)'
+    # 3,4: Keywords that start or continue a python block (only start of line)
+    _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
+               '|^([ \\t]*(?:elif|else|except|finally)\\b)'
+    # 5: Our special 'end' keyword (but only if it stands alone)
+    _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
+    # 6: A customizable end-of-code-block template token (only end of line)
+    _re_tok += '|(%(block_close)s[ \\t]*(?=$))'
+    # 7: And finally, a single newline. The 8th token is 'everything else'
+    _re_tok += '|(\\r?\\n)'
+    # Match the start tokens of code areas in a template
+    _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))(%%?)'
+    # Match inline statements (may contain python strings)
+    _re_inl = '%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl
+
+    default_syntax = '<% %> % {{ }}'
+
+    def __init__(self, source, syntax=None, encoding='utf8'):
+        self.source, self.encoding = touni(source, encoding), encoding
+        self.set_syntax(syntax or self.default_syntax)
+        self.code_buffer, self.text_buffer = [], []
+        self.lineno, self.offset = 1, 0
+        self.indent, self.indent_mod = 0, 0
+
+    def get_syntax(self):
+        ''' Tokens as a space separated string (default: <% %> % {{ }}) '''
+        return self._syntax
+
+    def set_syntax(self, syntax):
+        self._syntax = syntax
+        self._tokens = syntax.split()
+        if not syntax in self._re_cache:
+            names = 'block_start block_close line_start inline_start inline_end'
+            etokens = map(re.escape, self._tokens)
+            pattern_vars = dict(zip(names.split(), etokens))
+            patterns = (self._re_split, self._re_tok, self._re_inl)
+            patterns = [re.compile(p%pattern_vars) for p in patterns]
+            self._re_cache[syntax] = patterns
+        self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
+
+    syntax = property(get_syntax, set_syntax)
+
+    def translate(self):
+        if self.offset: raise RuntimeError('Parser is a one time instance.')
+        while True:
+            m = self.re_split.search(self.source[self.offset:])
+            if m:
+                text = self.source[self.offset:self.offset+m.start()]
+                self.text_buffer.append(text)
+                self.offset += m.end()
+                if m.group(1): # New escape syntax
+                    line, sep, _ = self.source[self.offset:].partition('\n')
+                    self.text_buffer.append(m.group(2)+m.group(5)+line+sep)
+                    self.offset += len(line+sep)+1
+                    continue
+                elif m.group(5): # Old escape syntax
+                    depr('Escape code lines with a backslash.') #0.12
+                    line, sep, _ = self.source[self.offset:].partition('\n')
+                    self.text_buffer.append(m.group(2)+line+sep)
+                    self.offset += len(line+sep)+1
+                    continue
+                self.flush_text()
+                self.read_code(multiline=bool(m.group(4)))
+            else: break
+        self.text_buffer.append(self.source[self.offset:])
+        self.flush_text()
+        return ''.join(self.code_buffer)
+
+    def read_code(self, multiline):
+        code_line, comment = '', ''
+        while True:
+            m = self.re_tok.search(self.source[self.offset:])
+            if not m:
+                code_line += self.source[self.offset:]
+                self.offset = len(self.source)
+                self.write_code(code_line.strip(), comment)
+                return
+            code_line += self.source[self.offset:self.offset+m.start()]
+            self.offset += m.end()
+            _str, _com, _blk1, _blk2, _end, _cend, _nl = m.groups()
+            if code_line and (_blk1 or _blk2): # a if b else c
+                code_line += _blk1 or _blk2
+                continue
+            if _str:    # Python string
+                code_line += _str
+            elif _com:  # Python comment (up to EOL)
+                comment = _com
+                if multiline and _com.strip().endswith(self._tokens[1]):
+                    multiline = False # Allow end-of-block in comments
+            elif _blk1: # Start-block keyword (if/for/while/def/try/...)
+                code_line, self.indent_mod = _blk1, -1
+                self.indent += 1
+            elif _blk2: # Continue-block keyword (else/elif/except/...)
+                code_line, self.indent_mod = _blk2, -1
+            elif _end:  # The non-standard 'end'-keyword (ends a block)
+                self.indent -= 1
+            elif _cend: # The end-code-block template token (usually '%>')
+                if multiline: multiline = False
+                else: code_line += _cend
+            else: # \n
+                self.write_code(code_line.strip(), comment)
+                self.lineno += 1
+                code_line, comment, self.indent_mod = '', '', 0
+                if not multiline:
+                    break
+
+    def flush_text(self):
+        text = ''.join(self.text_buffer)
+        del self.text_buffer[:]
+        if not text: return
+        parts, pos, nl = [], 0, '\\\n'+'  '*self.indent
+        for m in self.re_inl.finditer(text):
+            prefix, pos = text[pos:m.start()], m.end()
+            if prefix:
+                parts.append(nl.join(map(repr, prefix.splitlines(True))))
+            if prefix.endswith('\n'): parts[-1] += nl
+            parts.append(self.process_inline(m.group(1).strip()))
+        if pos < len(text):
+            prefix = text[pos:]
+            lines = prefix.splitlines(True)
+            if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
+            elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
+            parts.append(nl.join(map(repr, lines)))
+        code = '_printlist((%s,))' % ', '.join(parts)
+        self.lineno += code.count('\n')+1
+        self.write_code(code)
+
+    def process_inline(self, chunk):
+        if chunk[0] == '!': return '_str(%s)' % chunk[1:]
+        return '_escape(%s)' % chunk
+
+    def write_code(self, line, comment=''):
+        line, comment = self.fix_backward_compatibility(line, comment)
+        code  = '  ' * (self.indent+self.indent_mod)
+        code += line.lstrip() + comment + '\n'
+        self.code_buffer.append(code)
+
+    def fix_backward_compatibility(self, line, comment):
+        parts = line.strip().split(None, 2)
+        if parts and parts[0] in ('include', 'rebase'):
+            depr('The include and rebase keywords are functions now.') #0.12
+            if len(parts) == 1:   return "_printlist([base])", comment
+            elif len(parts) == 2: return "_=%s(%r)" % tuple(parts), comment
+            else:                 return "_=%s(%r, %s)" % tuple(parts), comment
+        if self.lineno <= 2 and not line.strip() and 'coding' in comment:
+            m = re.match(r"#.*coding[:=]\s*([-\w.]+)", comment)
+            if m:
+                depr('PEP263 encoding strings in templates are deprecated.') #0.12
+                enc = m.group(1)
+                self.source = self.source.encode(self.encoding).decode(enc)
+                self.encoding = enc
+                return line, comment.replace('coding','coding*')
+        return line, comment
+
+
 def template(*args, **kwargs):
     '''
     Get a rendered template as a string iterator.
@@ -3057,26 +3577,26 @@
     or directly (as keyword arguments).
     '''
     tpl = args[0] if args else None
-    template_adapter = kwargs.pop('template_adapter', SimpleTemplate)
-    if tpl not in TEMPLATES or DEBUG:
+    adapter = kwargs.pop('template_adapter', SimpleTemplate)
+    lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+    tplid = (id(lookup), tpl)
+    if tplid not in TEMPLATES or DEBUG:
         settings = kwargs.pop('template_settings', {})
-        lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
-        if isinstance(tpl, template_adapter):
-            TEMPLATES[tpl] = tpl
-            if settings: TEMPLATES[tpl].prepare(**settings)
+        if isinstance(tpl, adapter):
+            TEMPLATES[tplid] = tpl
+            if settings: TEMPLATES[tplid].prepare(**settings)
         elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
-            TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings)
+            TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
         else:
-            TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings)
-    if not TEMPLATES[tpl]:
+            TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
+    if not TEMPLATES[tplid]:
         abort(500, 'Template (%s) not found' % tpl)
     for dictarg in args[1:]: kwargs.update(dictarg)
-    return TEMPLATES[tpl].render(kwargs)
+    return TEMPLATES[tplid].render(kwargs)
 
 mako_template = functools.partial(template, template_adapter=MakoTemplate)
 cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
 jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
-simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate)
 
 
 def view(tpl_name, **defaults):
@@ -3097,6 +3617,8 @@
                 tplvars = defaults.copy()
                 tplvars.update(result)
                 return template(tpl_name, **tplvars)
+            elif result is None:
+                return template(tpl_name, defaults)
             return result
         return wrapper
     return decorator
@@ -3104,7 +3626,6 @@
 mako_view = functools.partial(view, template_adapter=MakoTemplate)
 cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
 jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
-simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate)
 
 
 
@@ -3124,6 +3645,7 @@
 #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
 HTTP_CODES = httplib.responses
 HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+HTTP_CODES[422] = "Unprocessable Entity" # RFC 4918
 HTTP_CODES[428] = "Precondition Required"
 HTTP_CODES[429] = "Too Many Requests"
 HTTP_CODES[431] = "Request Header Fields Too Large"
@@ -3134,11 +3656,10 @@
 ERROR_PAGE_TEMPLATE = """
 %%try:
     %%from %s import DEBUG, HTTP_CODES, request, touni
-    %%status_name = HTTP_CODES.get(e.status, 'Unknown').title()
     <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
     <html>
         <head>
-            <title>Error {{e.status}}: {{status_name}}</title>
+            <title>Error: {{e.status}}</title>
             <style type="text/css">
               html {background-color: #eee; font-family: sans;}
               body {background-color: #fff; border: 1px solid #ddd;
@@ -3147,10 +3668,10 @@
             </style>
         </head>
         <body>
-            <h1>Error {{e.status}}: {{status_name}}</h1>
+            <h1>Error: {{e.status}}</h1>
             <p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
                caused an error:</p>
-            <pre>{{e.output}}</pre>
+            <pre>{{e.body}}</pre>
             %%if DEBUG and e.exception:
               <h2>Exception:</h2>
               <pre>{{repr(e.exception)}}</pre>
@@ -3167,7 +3688,7 @@
 %%end
 """ % __name__
 
-#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a 
+#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
 #: request callback, this instance always refers to the *current* request
 #: (even on a multithreaded server).
 request = LocalRequest()
@@ -3186,7 +3707,7 @@
 
 #: A virtual package that redirects import statements.
 #: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
-ext = _ImportRedirect(__name__+'.ext', 'bottle_%s').module
+ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
 
 if __name__ == '__main__':
     opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
@@ -3202,10 +3723,11 @@
     sys.modules.setdefault('bottle', sys.modules['__main__'])
 
     host, port = (opt.bind or 'localhost'), 8080
-    if ':' in host:
+    if ':' in host and host.rfind(']') < host.rfind(':'):
         host, port = host.rsplit(':', 1)
-
-    run(args[0], host=host, port=port, server=opt.server,
+    host = host.strip('[]')
+
+    run(args[0], host=host, port=int(port), server=opt.server,
         reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
 
 
--- a/web/config.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/config.py	Sat Jul 06 18:29:45 2019 +0800
@@ -9,31 +9,38 @@
 
 # local config items
 HMAC_KEY = 'a hmac key' 
-ALLOWED_USERS = [] # list of sha1 hashes of client ssl keys
-SSH_HOST = 'remotehost'
-SSH_KEYFILE = '/home/matt/.ssh/somekey'
-SSH_PROG = 'ssh'
+ALLOWED_USERS = [] # list of hashes allowed, as provided by the Email link
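+# Each entry is the sha256 hex digest of a browser's auth cookie, shown to the
+# user via the "Email" link on the /set page. Hypothetical example only:
+#   ALLOWED_USERS = ['<64 hex chars from the email>']
+# Real values belong in localconfig.py, not here.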
 
 UPDATE_URL = 'http://evil.ucc.asn.au/~matt/templog/update'
 
-GRAPH_WIDTH = 1200
-GRAPH_HEIGHT = 600
+EMAIL = "[email protected]"
+
+GRAPH_WIDTH = 600
+GRAPH_HEIGHT = 700
 ZOOM = 1
+# determine by viewing the image
+GRAPH_LEFT_MARGIN = 65
 
 LINE_WIDTH = 2
 
-SENSOR_NAMES = {'sensor_28 CE B2 1A 03 00 00 99': "Old Fridge",
+SENSOR_NAMES = {
+    'sensor_28 CE B2 1A 03 00 00 99': "Old Fridge",
     'sensor_28 CC C1 1A 03 00 00 D4': "Old Ambient",
     'sensor_28 49 BC 1A 03 00 00 54': "Old Wort",
     'sensor_voltage': 'Voltage',
     'sensor_fridge_setpoint': 'Setpoint',
     'sensor_fridge_on': 'Cool',
-    'sensor_28-0000042cf4dd': "Wort",
-    'sensor_28-0000042cccc4': "Fridge",
-    'sensor_28-0000042c6dbb': "Ambient",
+    'sensor_28-0000042cf4dd': "New Old Wort",
+    'sensor_28-0000042d36cc': "Wort",
+    'sensor_28-0000042cccc4': "OldFridge",
+    'sensor_28-0000042c6dbb': "New Old Fridge",
+    'sensor_28-0000068922df': "Fridge",
     'sensor_internal': "Processor",
     }
 
+# print legend for these ones
+LEGEND_NAMES = set(("Wort", "Fridge", "Ambient", "Setpoint"))
+
 SENSOR_COLOURS = {'Wort': 'e49222', 
                 'Ambient': '028b3d',
                 'Voltage': '7db5d3aa',
@@ -47,12 +54,12 @@
 GRAPH_FONT = "Prociono"
 #GRAPH_FONT = "URW Gothic L"
 
-# determine by zooming in an image viewer
-GRAPH_LEFT_MARGIN = 63
 
 # 1 hour
 CSRF_TIMEOUT = 3600
 
+LONG_POLL_TIMEOUT = 500 # seconds
+
 try:
     from localconfig import *
 except ImportError:
--- a/web/log.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/log.py	Sat Jul 06 18:29:45 2019 +0800
@@ -20,6 +20,9 @@
 
 import config
 import atomicfile
+import settings
+
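+# single in-process store for the fridge parameters; write_current_params /
+# read_current_params below and the /get_settings long poll in templog.py all
+# go through this instance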
+fridge_settings = settings.Settings()
 
 def sensor_rrd_path(s):
     return '%s/sensor_%s.rrd' % (config.DATA_PATH, str(s))
@@ -48,8 +51,6 @@
                 'DS:temp:GAUGE:600:-100:500',
                 'RRA:AVERAGE:0.5:1:1051200']
 
-    print>>sys.stderr, sensor_rrd_path(sensor_id) 
-
     rrdtool.create(sensor_rrd_path(sensor_id), 
                 '--start', 'now-60d',
                 *args)
@@ -103,7 +104,7 @@
             graph_args.append('DEF:raw%(vname)s=%(rrdfile)s:temp:AVERAGE' % locals())
             # limit max temp to 50
             graph_args.append('CDEF:%(vname)s=raw%(vname)s,38,GT,UNKN,raw%(vname)s,%(volts_mult)f,*,%(volts_shift)f,+,IF' % locals())
-            unit = '<span face="Liberation Serif">º</span>C'
+            unit = '<span face="Liberation Serif">°</span>C'
 
         format_last_value = None
         if unit:
@@ -115,11 +116,13 @@
         width = config.LINE_WIDTH
         legend = config.SENSOR_NAMES.get(sensor, sensor)
         colour = config.SENSOR_COLOURS.get(legend, colour_from_string(sensor))
-        if format_last_value:
-            print_legend = '%s (%s)' % (legend, format_last_value)
-        else:
-            print_legend = legend
-        sensor_lines.append( (legend, 'LINE%(width)f:%(vname)s#%(colour)s:%(print_legend)s' % locals()) )
+        print_legend = ''
+        if legend in config.LEGEND_NAMES:
+            if format_last_value:
+                print_legend = ':%s (%s)' % (legend, format_last_value)
+            else:
+                print_legend = ":%s" % legend
+        sensor_lines.append( (legend, 'LINE%(width)f:%(vname)s#%(colour)s%(print_legend)s' % locals()) )
         if legend == 'Wort':
             wort_sensor = vname
         elif legend == 'Fridge':
@@ -127,20 +130,18 @@
 
     # calculated bits
     colour = '000000'
-    print_legend = 'Heat'
     graph_args.append('CDEF:wortdel=%(wort_sensor)s,PREV(%(wort_sensor)s),-' % locals())
     graph_args.append('CDEF:tempdel=%(wort_sensor)s,%(fridge_sensor)s,-' % locals())
     graph_args.append('CDEF:fermheat=wortdel,80,*,tempdel,0.9,*,+' % locals())
     graph_args.append('CDEF:trendfermheat=fermheat,7200,TRENDNAN' % locals())
     graph_args.append('CDEF:limitfermheat=trendfermheat,5,+,11,MIN,2,MAX' % locals())
-    graph_args.append('LINE0.5:limitfermheat#%(colour)s:%(print_legend)s' % locals())
+    graph_args.append('LINE0.5:limitfermheat#%(colour)s' % locals())
 
     # lines are done afterwards so they can be layered
     sensor_lines.sort(key = lambda (legend, line): "Wort" in legend)
     graph_args += (line for (legend, line) in sensor_lines)
 
-    print>>sys.stderr, '\n'.join(graph_args)
-
+    #print>>sys.stderr, '\n'.join(graph_args)
 
     end = int(start+length)
     start = int(start)
@@ -220,8 +221,9 @@
 
 def record_debug(params):
     f = debug_file('a+')
-    f.write('===== %s =====\n' % time.strftime('%a, %d %b %Y %H:%M:%S'))
+    f.write('===== start %s =====\n' % time.strftime('%a, %d %b %Y %H:%M:%S'))
     json.dump(params, f, sort_keys=True, indent=4)
+    f.write('===== end %s =====\n' % time.strftime('%a, %d %b %Y %H:%M:%S'))
     f.flush()
     return f
 
@@ -243,17 +245,12 @@
     tick_secs = int(entries['tick_secs'])
     return val_ticks + float(val_rem) * tick_secs / tick_wake
 
-def write_current_params(current_params):
-    out = {}
-    out['params'] = current_params
-    out['time'] = time.time()
-    atomicfile.AtomicFile("%s/current_params.txt" % config.DATA_PATH).write(
-        json.dumps(out, sort_keys=True, indent=4)+'\n')
+def write_current_params(current_params, current_epoch):
+    fridge_settings.update(current_params, current_epoch)
 
 def read_current_params():
-    p = atomicfile.AtomicFile("%s/current_params.txt" % config.DATA_PATH).read()
-    dat = json.loads(p)
-    return dat['params']
+    params, epochtag = fridge_settings.get()
+    return params
 
 def parse(params):
 
@@ -277,10 +274,11 @@
 
     # one-off measurements here
     current_params = params['current_params']
+    current_epoch = params['current_params_epoch']
     measurements['fridge_on'] = [ (time.time(), params['fridge_on']) ]
     measurements['fridge_setpoint'] = [ (time.time(), current_params['fridge_setpoint']) ]
 
-    write_current_params(current_params)
+    write_current_params(current_params, current_epoch)
 
     for s, vs in measurements.iteritems():
         sensor_update(s, vs)
@@ -289,22 +287,34 @@
     debugf.write("Updated sensors in %.2f secs\n" % timedelta)
     debugf.flush()
 
+# types used here define the type of a field
 _FIELD_DEFAULTS = {
     'fridge_setpoint': 16.0,
     'fridge_difference': 0.2,
     'overshoot_delay': 720, # 12 minutes
-    'overshoot_factor': 1, # ºC
+    'overshoot_factor': 1.0, # °C
     'disabled': False,
     'nowort': True,
     'fridge_range_lower': 3,
     'fridge_range_upper': 3,
     }
 
+def fake_params():
+    """ for quicker testing """
+    r = []
+    r.append({'name': 'going', 'value': 'true', 'kind': 'yesno', 'title': 'going'})
+    r.append({'name': 'temperature', 'value': 12.5, 'kind': 'number', 'title': 'temperature', 'digits': 1, 'amount': 0.1, 'unit': '°'})
+    return r
+
 def get_params():
+    """ Can return None if there aren't any parameters yet,
+    otherwise returns the parameter list """
 
     r = []
 
     vals = read_current_params()
+    if not vals:
+        return None
 
     for k, v in _FIELD_DEFAULTS.iteritems():
         n = {'name': k, 'value': type(v)(vals[k])}
@@ -317,7 +327,7 @@
                 n['amount'] = 60
                 n['digits'] = 0;
             else:
-                n['unit'] = 'º'
+                n['unit'] = '°'
                 n['amount'] = 0.1;
                 n['digits'] = 1;
         n['kind'] = kind
@@ -326,26 +336,6 @@
 
     return json.dumps(r, sort_keys=True, indent=4)
 
-def send_params(params):
-    # 'templog_receive' is ignored due to authorized_keys
-    # restrictions. the rpi has authorized_keys with
-    # command="/home/matt/templog/venv/bin/python /home/matt/templog/py/receive.py",no-pty,no-port-forwarding,no-x11-forwarding,no-agent-forwarding ssh-rsa AAAAB3NzaC....
-    args = [config.SSH_PROG, '-i', config.SSH_KEYFILE,
-        config.SSH_HOST, 'templog_receive']
-    try:
-        p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-        (out, err) = p.communicate(json.dumps(params))
-    except OSError, e:
-        print>>sys.stderr, e
-        return "Failed update"
-
-    if 'Good Update' in out:
-        return True
-
-    print>>sys.stderr, "Strange return from update:"
-    print>>sys.stderr, out
-    return "Unexpected update result"
-
 def same_type(a, b):
     ta = type(a)
     tb = type(b)
@@ -370,10 +360,7 @@
         if not same_type(v, _FIELD_DEFAULTS[k]):
             return "Bad type for %s, %s vs %s" % (k , type(v), type(_FIELD_DEFAULTS[k]))
 
-    ret = send_params(params) 
-    if ret is not True:
-        return "Failed sending params: %s" % ret
-
+    fridge_settings.update(params)
     return True
 
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/requirements.txt	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,11 @@
+argparse==1.2.1
+gevent==1.0.2
+greenlet==0.4.7
+
+# sha256: nkIlLxfR3YnuMXReDE--WIYsJRR-sO9SlcnNm8tOosE
+lockfile==0.10.2
+
+peep==2.4.1
+python-rrdtool==1.4.7
+uWSGI==2.0.10
+wsgiref==0.1.2
--- a/web/secure.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/secure.py	Sat Jul 06 18:29:45 2019 +0800
@@ -11,56 +11,47 @@
 
 import config
 
-__all__ = ["get_csrf_blob", "check_csrf_blob", "setup_csrf", "get_user_hash",
-"check_user_hash"]
+__all__ = [
+    "get_csrf_blob", 
+    "check_csrf_blob", 
+    "setup_csrf", 
+    "check_cookie",
+    "init_cookie",
+]
+
+AUTH_COOKIE = 'templogauth'
+AUTH_COOKIE_LEN = 16
 
 HASH=hashlib.sha1
 
 CLEAN_RE = re.compile('[^a-z0-9A-Z]')
 
-def clean_hash(h):
-    return CLEAN_RE.sub('', h.lower())
-
-def get_user_hash():
-    """
-    Uses the following apache config. 
-    Needs a separate port or IP to no-certificate SSL, SNI isn't good enough.
-
-    <location /~matt/templog/set>
-    Require all granted
-    SSLVerifyClient optional_no_ca
-    SSLVerifyDepth 1
-    SSLOptions +StdEnvVars +ExportCertData +OptRenegotiate
-    </location>
-    """
+def cookie_hash(c):
+    return hashlib.sha256(c).hexdigest()
 
-    verify = bottle.request.environ.get('SSL_CLIENT_VERIFY', '')
-    if not (verify == 'GENEROUS' or verify == 'SUCCESS'):
-        return 'FAILVERIFY'
-    blob = bottle.request.environ.get('SSL_CLIENT_CERT')
-    if not blob:
-        return 'NOCERT'
+def init_cookie():
+    """ Generates a new httponly auth cookie if required. 
+    Returns the hash of the cookie (new or existing)
+    """
+    c = bottle.request.get_cookie(AUTH_COOKIE)
+    if not c:
+        c = binascii.hexlify(os.urandom(AUTH_COOKIE_LEN))
+        bottle.response.set_cookie(AUTH_COOKIE, c, secure=True, httponly=True)
+    return cookie_hash(c)
 
-    b64 = ''.join(l for l in blob.split('\n')
-        if not l.startswith('-'))
-
-    return HASH(binascii.a2b_base64(b64)).hexdigest()
-
-def check_user_hash(allowed_users):
-    current_hash = clean_hash(get_user_hash())
-    for a in allowed_users:
-        if current_hash == clean_hash(a):
-            return True
-    return False
+def check_cookie(allowed_users):
+    c = bottle.request.get_cookie(AUTH_COOKIE)
+    if not c:
+        return False
+    return cookie_hash(c) in allowed_users
 
 def setup_csrf():
     NONCE_SIZE=16
     global _csrf_fd, _csrf_key
-    _csrf_fd = open('%s/csrf.dat' % config.DATA_PATH, 'r+')
+    _csrf_fd = os.fdopen(os.open('%s/csrf.dat' % config.DATA_PATH, os.O_RDWR | os.O_CREAT, 0600), 'r+')
 
     try:
         fcntl.lockf(_csrf_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-        os.fchmod(_csrf_fd.fileno(), 0600)
         _csrf_fd.write("%d-%s" % (os.getpid(), binascii.hexlify(os.urandom(NONCE_SIZE))))
         _csrf_fd.flush()
         _csrf_fd.seek(0)
@@ -73,7 +64,7 @@
 
 def get_csrf_blob():
     expiry = int(config.CSRF_TIMEOUT + time.time())
-    content = '%s-%s' % (get_user_hash(), expiry)
+    content = '%s-%s' % (init_cookie(), expiry)
     mac = hmac.new(_csrf_key, content).hexdigest()
     return "%s-%s" % (content, mac)
 
@@ -84,7 +75,7 @@
         return False
 
     user, expiry, mac = toks
-    if user != get_user_hash():
+    if user != init_cookie():
         print>>sys.stderr, "wrong user"
         return False
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/settings.py	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,64 @@
+import gevent
+import gevent.event
+import fcntl
+import hashlib
+
+import binascii
+import os
+
+class Settings(object):
+    """ Handles state updates from both the web UI and from the fridge client.
+    The fridge client is canonical. It provides the epoch (apart from the initial
+    'startepoch'), which changes whenever the fridge reloads its local config. The
+    fridge only accepts updates that carry the same epoch.
+
+    When the web UI changes a setting it keeps the same epoch but generates a new
+    tag. The fridge sends its current known tag and waits for it to change.
+
+    contents is opaque, presently a dictionary of decoded json.
+    """
+
+    RAND_SIZE = 15 # 120 bits
+
+    def __init__(self):
+        self.event = gevent.event.Event()
+        self.contents = None
+        self.epoch = None
+        self.tag = None
+
+        self.update(None, 'startepoch')
+
+    def wait(self, epoch_tag = None, timeout = None):
+        """ returns false if the timeout was hit """
+        if self.epoch_tag() != epoch_tag:
+            # has already changed
+            return True
+        return self.event.wait(timeout)
+
+    def epoch_tag(self):
+        return '%s-%s' % (self.epoch, self.tag)
+
+    def random(self):
+        return binascii.hexlify(os.urandom(self.RAND_SIZE))
+
+    def update(self, contents, epoch = None):
+        """ replaces settings contents and updates waiters if changed """
+        if epoch:
+            if self.epoch == epoch:
+                return
+            else:
+                self.epoch = epoch
+
+        self.tag = self.random()
+        self.contents = contents
+
+        self.event.set()
+        self.event.clear()
+
+    def get(self):
+        """ Returns (contents, epoch-tag) """
+        return self.contents, self.epoch_tag()
+
+
+
+
+
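+# Usage sketch (illustrative only, nothing below is called by the real code):
+#
+#   s = Settings()
+#   contents, tag = s.get()        # tag is '<epoch>-<random hex>', epoch starts as 'startepoch'
+#   s.update(params)               # web UI edit: same epoch, new tag, waiters are woken
+#   s.update(params, epoch='e1')   # fridge push: only replaces contents if the epoch changed
+#   s.wait(tag, timeout=60)        # long poll: returns once the epoch-tag no longer matches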
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/templog-uwsgi.ini	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,20 @@
+[uwsgi]
+
+# run with "/path/to/venv/bin/uwsgi --chdir /path/to/web /path/to/web/templog.uwsgi"
+# tested with Apache mod_proxy_uwsgi and 
+# ProxyPass /~matt/t2 uwsgi://127.0.0.1:9090/
+
+# future: apache > 2.4.9 could use "socket=uwsgi.sock" and then apache config of
+# ProxyPass /templog unix:/path/to/web/uwsgi.sock|uwsgi://
+socket=127.0.0.1:9090
+
+wsgi-file=templog.uwsgi
+gevent=100
+
+# for client certificates. default limit is 4096, not sufficient
+buffer-size=20000
+
+user = matt
+group = matt
+
+chdir = /home/matt/templog/web
--- a/web/templog.py	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/templog.py	Sat Jul 06 18:29:45 2019 +0800
@@ -11,6 +11,7 @@
 import os
 import traceback
 import fcntl
+import hashlib
 
 import bottle
 from bottle import route, request, response
@@ -23,12 +24,22 @@
 DATE_FORMAT = '%Y%m%d-%H.%M'
 ZOOM_SCALE = 2.0
 
+class TemplogBottle(bottle.Bottle):
+    def run(self, *args, **argm):
+        argm['server'] = 'gevent'
+        super(TemplogBottle, self).run(*args, **argm)
+
+bottle.default_app.push(TemplogBottle())
+
+secure.setup_csrf()
+
 @route('/update', method='post')
 def update():
     js_enc = request.forms.data
     mac = request.forms.hmac
 
-    if hmac.new(config.HMAC_KEY, js_enc).hexdigest() != mac:
+    h = hmac.new(config.HMAC_KEY, js_enc.strip(), hashlib.sha256).hexdigest()
+    if h != mac:
-        raise bottle.HTTPError(code = 403, output = "Bad key")
+        raise bottle.HTTPError(403, "Bad key")
 
     js = zlib.decompress(binascii.a2b_base64(js_enc))
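+    # the sender is expected to have built the payload the inverse way (sketch
+    # only, not the actual uploader code):
+    #   js_enc = binascii.b2a_base64(zlib.compress(json.dumps(data)))
+    #   mac = hmac.new(config.HMAC_KEY, js_enc.strip(), hashlib.sha256).hexdigest()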
@@ -39,50 +50,69 @@
 
     return "OK"
 
-@route('/graph.png')
-def graph():
-    length_minutes = int(request.query.length)
-    end = datetime.strptime(request.query.end, DATE_FORMAT)
+def make_graph(length, end):
+    length_minutes = int(length)
+    end = datetime.strptime(end, DATE_FORMAT)
     start = end - timedelta(minutes=length_minutes)
 
-    response.set_header('Content-Type', 'image/png')
     start_epoch = time.mktime(start.timetuple())
     return log.graph_png(start_epoch, length_minutes * 60)
 
+def encode_data(data, mimetype):
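+    # wrap raw bytes as a data: URI so the graph PNG can be inlined straight into
+    # the top page template instead of being fetched with a second request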
+    return 'data:%s;base64,%s' % (mimetype, binascii.b2a_base64(data).rstrip())
+
+@route('/graph.png')
+def graph():
+    response.set_header('Content-Type', 'image/png')
+    minutes, endstr = get_request_zoom()
+    return make_graph(minutes, endstr)
+
 @route('/set/update', method='post')
 def set_update():
+    if not secure.check_cookie(config.ALLOWED_USERS):
+        # the "Save" button should be disabled if the cert wasn't
+        # good
+        response.status = 403
+        return "No cert, dodginess"
+
     post_json = json.loads(request.forms.data)
 
     csrf_blob = post_json['csrf_blob']
 
     if not secure.check_csrf_blob(csrf_blob):
-        bottle.response.status = 403
+        response.status = 403
         return "Bad csrf"
 
     ret = log.update_params(post_json['params'])
     if not ret is True:
-        bottle.response.status = 403
+        response.status = 409 # Conflict
         return ret
         
     return "Good"
 
 @route('/set')
 def set():
-    allowed = ["false", "true"][secure.check_user_hash(config.ALLOWED_USERS)]
+    cookie_hash = secure.init_cookie()
+    allowed = ["false", "true"][secure.check_cookie(config.ALLOWED_USERS)]
     response.set_header('Cache-Control', 'no-cache')
-    return bottle.template('set', 
-        inline_data = log.get_params(), 
-        csrf_blob = secure.get_csrf_blob(),
-        allowed = allowed)
+    if request.query.fake:
+        inline_data = log.fake_params()
+    else:
+        inline_data = log.get_params()
+    if not inline_data:
+        response.status = 503 # Service Unavailable
+        return bottle.template('noparamsyet')
 
-@route('/set_current.json')
-def set_fresh():
-    response.set_header('Content-Type', 'application/javascript')
-    return log.get_current()
+    return bottle.template('set', 
+        inline_data = inline_data,
+        csrf_blob = secure.get_csrf_blob(),
+        allowed = allowed,
+        cookie_hash = cookie_hash,
+        email = urllib.quote(config.EMAIL))
 
-@route('/')
-def top():
-
+def get_request_zoom():
+    """ returns (length, end) tuple.
+    length is in minutes, end is a DATE_FORMAT string """
     minutes = int(request.query.get('length', 26*60))
 
     if 'end' in request.query:
@@ -93,7 +123,8 @@
     if 'zoom' in request.query:
         orig_start = end - timedelta(minutes=minutes)
         orig_end = end
-        xpos = int(request.query.x)
+        scale = float(request.query.scaledwidth) / config.GRAPH_WIDTH
+        xpos = int(request.query.x) / scale
         xpos -= config.GRAPH_LEFT_MARGIN * config.ZOOM
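+        # worked example using the defaults in web/config.py (GRAPH_WIDTH=600,
+        # GRAPH_LEFT_MARGIN=65, ZOOM=1): a click at x=200 on an image displayed
+        # 300px wide gives scale=0.5, xpos=200/0.5=400, then 400-65=335, which
+        # falls inside the 0..600 plot area check below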
 
         if xpos >= 0 and xpos < config.GRAPH_WIDTH * config.ZOOM:
@@ -109,14 +140,24 @@
 
     if end > datetime.now():
         end = datetime.now()
-        
+
+    endstr = end.strftime(DATE_FORMAT)
+    return (minutes, endstr)
+
+@route('/')
+def top():
+    minutes, endstr = get_request_zoom()
+
     request.query.replace('length', minutes)
-    request.query.replace('end', end.strftime(DATE_FORMAT))
+    request.query.replace('end', endstr)
 
     urlparams = urllib.urlencode(request.query)
+    graphdata = encode_data(make_graph(minutes, endstr), 'image/png')
     return bottle.template('top', urlparams=urlparams,
-                    end = end.strftime(DATE_FORMAT),
-                    length = minutes)
+                    end = endstr,
+                    length = minutes,
+                    graphwidth = config.GRAPH_WIDTH,
+                    graphdata = graphdata)
 
 @route('/debug')
 def debuglog():
@@ -133,14 +174,37 @@
     #var_lookup = environ['mod_ssl.var_lookup']
     #return var_lookup("SSL_SERVER_I_DN_O")
 
+@route('/h')
+def headers():
+    response.set_header('Content-Type', 'text/plain')
+    return '\n'.join("%s: %s" % x for x in request.headers.items())
+
+@route('/get_settings')
+def get_settings():
+    response.set_header('Cache-Control', 'no-cache')
+    req_etag = request.headers.get('etag', None)
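+    # the client sends its last seen epoch-tag in a custom 'etag' request header
+    # (not the standard If-None-Match); the 304 below means "nothing changed yet"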
+    if req_etag:
+        # wait for it to change
+        # XXX this is meant to return True if it has been woken up
+        # but it isn't working. Instead compare epochtag below.
+        log.fridge_settings.wait(req_etag, timeout=config.LONG_POLL_TIMEOUT)
+
+    contents, epoch_tag = log.fridge_settings.get()
+    if epoch_tag == req_etag:
+        response.status = 304
+        return "Nothing happened"
+
+    response.set_header('Content-Type', 'application/json')
+    return json.dumps({'params': contents, 'epoch_tag': epoch_tag})
+
 @bottle.get('/<filename:re:.*\.js>')
 def javascripts(filename):
     response.set_header('Cache-Control', "public, max-age=1296000")
     return bottle.static_file(filename, root='static')
 
-secure.setup_csrf()
 
 def main():
+    """ for standalone testing """
     #bottle.debug(True)
     #bottle.run(reloader=True)
     bottle.run(server='cgi', reloader=True)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/templog.uwsgi	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,20 @@
+# see templog-uwsgi.ini for arguments
+
+from gevent import monkey; monkey.patch_all()
+
+import os
+import sys
+import trace
+# Change working directory so relative paths (and template lookup) work again
+thisdir = os.path.dirname(__file__)
+if not thisdir:
+    thisdir="."
+os.chdir(thisdir)
+os.environ['LD_LIBRARY_PATH'] = '/home/matt/templog/web'
+
+# for some reason local imports don't work...
+sys.path.append(thisdir)
+
+import bottle
+import templog
+application = bottle.default_app()
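The launcher above defers its arguments to templog-uwsgi.ini, which is not part of this changeset, so the exact options are unknown. Purely as a hypothetical sketch, a matching configuration could look something like:

[uwsgi]
# hypothetical templog-uwsgi.ini -- the real file is not in this changeset
plugins = python
chdir = /home/matt/templog/web
wsgi-file = templog.uwsgi
master = true
processes = 1
socket = /run/uwsgi/templog.sock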
--- a/web/templog.wsgi	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/templog.wsgi	Sat Jul 06 18:29:45 2019 +0800
@@ -1,3 +1,5 @@
+#from gevent import monkey; monkey.patch_all()
+
 import os
 import sys
 # Change working directory so relative paths (and template lookup) work again
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/noparamsyet.tpl	Sat Jul 06 18:29:45 2019 +0800
@@ -0,0 +1,21 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+<head>
+<title>Try again soon</title>
+<meta name="viewport" content="width=device-width">
+<style type="text/css">
+    body {
+        font-family: monospace;
+        text-align: center;
+    }
+</style>
+</head>
+<body>
+<p>
+The templog fridge client hasn't sent its current settings to the server yet. 
+</p>
+<p>
+Try again in a minute or two.
+</p>
+</body>
+</html>
--- a/web/views/set.tpl	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/views/set.tpl	Sat Jul 06 18:29:45 2019 +0800
@@ -8,6 +8,7 @@
 <script>
 %include riot.min.js
 </script>
+<meta name="theme-color" content="#fff">
 
 <style type="text/css">
 span.no_selection {
@@ -20,6 +21,10 @@
     font-family: sans-serif;
 }
 
+a {
+    color: #000;
+}
+
 input {
     border: 2px solid transparent;
     border-radius: 4px;
@@ -28,6 +33,8 @@
     padding: 0;
     font-size: 30pt;
     height: 34pt;
+    vertical-align: middle;
+    line-height: 1em;
 }
 
 input[type="button"] {
@@ -36,7 +43,6 @@
     -webkit-appearance: none;
     -moz-appearance: none;
     background:#fff;
-    vertical-align: center;
 }
 
 input[type="submit"] {
@@ -80,6 +86,10 @@
     //vertical-align: center;
 }
 
+#mailauth {
+    display: none;
+}
+
 </style>
 <title>Set templog</title>
 </head>
@@ -123,6 +133,10 @@
         {
             param.oldvaluetext = param.oldvalue ? "Yes" : "No";
         }
+        else if (param.kind == "number")
+        {
+            param.oldvaluetext = Number(param.oldvalue).toFixed(param.digits);
+        }
         else
         {
             param.oldvaluetext = param.oldvalue;
@@ -228,6 +242,7 @@
     if (!allowed) {
         $("#savebutton").attr("disabled", true);
         $('#status').text("No cert")
+        $('#mailauth').show();
     }
 
     $("#savebutton").click(function() {
@@ -313,6 +328,8 @@
 <span id="savebox">
 <input type="button" id="savebutton" value="Save"/>
 <span id="status"></span>
+<span id="mailauth"> <a href="mailto:{{email}}?Subject=Allow%20Templog&body=Hash%20is%20{{cookie_hash}}">Email</a>
+</span>
 </span>
 
 
--- a/web/views/top.tpl	Thu Mar 19 21:50:52 2015 +0800
+++ b/web/views/top.tpl	Sat Jul 06 18:29:45 2019 +0800
@@ -1,7 +1,10 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
 <head>
-<style type="text/css"><!--
+<title>Wort Temperature Log</title>
+<meta name="viewport" content="width=device-width">
+<meta name="theme-color" content="#fff">
+<style type="text/css">
 span.no_selection {
     -webkit-user-select: none; // webkit (safari, chrome) browsers
     -moz-user-select: none; // mozilla browsers
@@ -12,19 +15,29 @@
  font-size: 70%;
  text-align: right;
 }
-//-->
+
+#mainimage {
+	width: 100%;
+	max-width: {{graphwidth}}px;
+}
 </style>
-<title></title>
 </head>
-
-<!-- Click on the graph to zoom in, click on the Y axis labels to zoom out -->
+<script type="text/javascript">
+function updatewidth() {
+	var width_input = document.getElementById("scaledwidth");
+	var main_image = document.getElementById("mainimage");
+	width_input.value = main_image.clientWidth;
+	return true;
+}
 
+</script>
 <body>
-<form action="" method="get">
-<span class="no_selection"><input type="image" style="width: 1200px" src="graph.png?{{urlparams}}"/></span>
+<form action="" method="get" onsubmit="return updatewidth();">
+<span class="no_selection"><input type="image" id="mainimage" src="{{graphdata}}"/></span>
 <input type="hidden" name="length" value="{{length}}"/>
 <input type="hidden" name="end" value="{{end}}"/>
 <input type="hidden" name="zoom" value="yeah"/>
+<input type="hidden" name="scaledwidth" id="scaledwidth" value="{{graphwidth}}"/>
 </form>
 <span class="codelink">Click to zoom in, click the left axis to zoom out. <a href="https://secure.ucc.asn.au/hg/templog/file/tip">Source code</a> for the Raspberry Pi controller and this web interface</a>. <a href="set">Adjustments</a> by phone.</span>
 </body>