@@ -0,0 +1,4 @@
|
||||
.DS_Store
|
||||
|
||||
# utemplate files
|
||||
/templates/*.py
|
||||
@@ -0,0 +1,7 @@
|
||||
# Remove the template modules generated by utemplate's recompile loader.
.PHONY: clean
clean:
	rm -f templates/*.py

# Run the app under the MicroPython interpreter (must be installed).
.PHONY: run
run:
	micropython main.py
|
||||
@@ -0,0 +1,15 @@
|
||||
TfL Countdown
|
||||
=============
|
||||
|
||||
This is a personalised TfL Countdown website that's supposed to
|
||||
show when the next bus departs from your nearest bus stop(s).
|
||||
|
||||
It's written in [MicroPython](https://micropython.org).
|
||||
|
||||
    make run
|
||||
|
||||
This runs the `main.py` using `micropython` (needs to be installed).
|
||||
|
||||
    make clean
|
||||
|
||||
This cleans all the compiled templates from the `templates` directory.
|
||||
@@ -0,0 +1,32 @@
|
||||
from microdot import Microdot
|
||||
from microdot.utemplate import Template
|
||||
import tflcountdown as tfl
|
||||
|
||||
# utemplate doc: https://github.com/pfalcon/utemplate
|
||||
|
||||
# TfL API key; the Countdown API does not currently require one.
API_KEY = "NOT_YET_REQUIRED"
# Stop-IDs from https://tfl.gov.uk/bus-stops.csv
# NOTE(review): this mapping is not referenced below — the stop IDs are
# hard-coded again inside index(); consider deriving that list from here.
STOP_IDS = {
    "Acton Vale (N)": "1597",
    "Acton Vale (S)": "1598",
    "Abinger Road (N)": "11333",
    "Abinger Road (S)": "11334"

    #H1227,58839,490018676N,Hail & Ride Larden Road,521409,179656,350,6408,1
    #H1228,N/A,490018676S,Hail & Ride Larden Road,521420,179655,170,6408,1
}
# Line-IDs of interest (currently not passed to get_countdown below).
LINE_IDS = {
    "272": "272"
}

app = Microdot()
tflc = tfl.TflCountdown(API_KEY)


@app.route("/")
async def index(request):
    # Fetch and parse live arrivals, then render them with the utemplate
    # engine. The stop list is hard-coded here.
    # NOTE(review): "R0199" does not appear in STOP_IDS above — confirm
    # it is intentional.
    response = tflc.get_countdown(["1597", "1598", "11333", "11334", "R0199"])
    data = tflc.parse_countdown(response.text)
    print(repr(data))  # NOTE(review): leftover debug output?
    return Template("index.html").render(data), {"Content-Type": "text/html"}


app.run(port=5001, debug=True)
|
||||
@@ -0,0 +1,2 @@
|
||||
from microdot.microdot import Microdot, Request, Response, abort, redirect, \
|
||||
send_file, URLPattern, AsyncBytesIO, iscoroutine # noqa: F401
|
||||
@@ -0,0 +1,8 @@
|
||||
# Provide functools.wraps with a graceful fallback for MicroPython.
try:
    from functools import wraps
except ImportError:  # pragma: no cover
    # MicroPython does not currently implement functools.wraps
    def wraps(wrapped):
        # No-op replacement: returns the wrapper unchanged, without
        # copying __name__/__doc__ metadata from *wrapped*.
        def _(wrapper):
            return wrapper
        return _
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,291 @@
|
||||
import os
|
||||
from random import choice
|
||||
from microdot import abort, iscoroutine, AsyncBytesIO
|
||||
from microdot.helpers import wraps
|
||||
|
||||
|
||||
class FormDataIter:
    """Asynchronous iterator that parses a ``multipart/form-data`` body and
    returns form fields and files as they are parsed.

    :param request: the request object to parse.

    Example usage::

        from microdot.multipart import FormDataIter

        @app.post('/upload')
        async def upload(request):
            async for name, value in FormDataIter(request):
                print(name, value)

    The iterator returns no values when the request has a content type other
    than ``multipart/form-data``. For a file field, the returned value is of
    type :class:`FileUpload`, which supports the
    :meth:`read() <FileUpload.read>` and :meth:`save() <FileUpload.save>`
    methods. Values for regular fields are provided as strings.

    The request body is read efficiently in chunks of size
    :attr:`buffer_size <FormDataIter.buffer_size>`. On iterations in which a
    file field is encountered, the file must be consumed before moving on to
    the next iteration, as the internal stream stored in ``FileUpload``
    instances is invalidated at the end of the iteration.
    """
    #: The size of the buffer used to read chunks of the request body.
    buffer_size = 256

    def __init__(self, request):
        self.request = request
        # self.buffer stays None for non-multipart requests, which makes
        # __anext__ stop immediately.
        self.buffer = None
        try:
            # NOTE(review): assumes request.content_type is a string;
            # a missing Content-Type header (None) would raise
            # AttributeError rather than ValueError — confirm upstream.
            mimetype, boundary = request.content_type.rsplit('; boundary=', 1)
        except ValueError:
            return  # not a multipart request
        if mimetype.split(';', 1)[0] == \
                'multipart/form-data':  # pragma: no branch
            # Boundary as it appears in the body, prefixed with "--".
            self.boundary = b'--' + boundary.encode()
            # Lookahead needed beyond buffer_size so that a boundary
            # straddling two reads is always detected: "--" + boundary
            # + "\r\n" is len(boundary) + 4 bytes.
            self.extra_size = len(boundary) + 4
            self.buffer = b''

    def __aiter__(self):
        return self

    async def __anext__(self):
        # Returns a (name, value) tuple for the next part, where value is
        # a decoded string for regular fields or a FileUpload for files.
        if self.buffer is None:
            raise StopAsyncIteration

        # make sure we have consumed the previous entry
        while await self._read_buffer(self.buffer_size) != b'':
            pass

        # make sure we are at a boundary
        s = self.buffer.split(self.boundary, 1)
        if len(s) != 2 or s[0] != b'':
            abort(400)  # pragma: no cover
        self.buffer = s[1]
        if self.buffer[:2] == b'--':
            # we have reached the end
            raise StopAsyncIteration
        elif self.buffer[:2] != b'\r\n':
            abort(400)  # pragma: no cover
        self.buffer = self.buffer[2:]

        # parse the headers of this part
        name = ''
        filename = None
        content_type = None
        while True:
            await self._fill_buffer()
            lines = self.buffer.split(b'\r\n', 1)
            if len(lines) != 2:
                abort(400)  # pragma: no cover
            line, self.buffer = lines
            if line == b'':
                # we reached the end of the headers
                break
            header, value = line.decode().split(':', 1)
            header = header.lower()
            value = value.strip()
            if header == 'content-disposition':
                # e.g. form-data; name="field"; filename="file.txt"
                parts = value.split(';')
                if len(parts) < 2 or parts[0] != 'form-data':
                    abort(400)  # pragma: no cover
                for part in parts[1:]:
                    part = part.strip()
                    if part.startswith('name="'):
                        name = part[6:-1]
                    elif part.startswith('filename="'):  # pragma: no branch
                        filename = part[10:-1]
            elif header == 'content-type':  # pragma: no branch
                content_type = value

        if filename is None:
            # this is a regular form field, so we read the value
            value = b''
            while True:
                v = await self._read_buffer(self.buffer_size)
                value += v
                # a short read means _read_buffer hit this part's boundary
                if len(v) < self.buffer_size:  # pragma: no branch
                    break
            return name, value.decode()
        # file field: hand the caller a lazy reader over this part's data
        return name, FileUpload(filename, content_type, self._read_buffer)

    async def _fill_buffer(self):
        # Top up the buffer to buffer_size + extra_size bytes, so a full
        # chunk can be returned while keeping enough lookahead to detect
        # a boundary that spans the chunk border.
        self.buffer += await self.request.stream.read(
            self.buffer_size + self.extra_size - len(self.buffer))

    async def _read_buffer(self, n=-1):
        # Read up to n bytes (or all with n == -1) of the current part,
        # stopping at the part's boundary. When the boundary is reached,
        # it is pushed back into self.buffer so __anext__ can find it.
        data = b''
        while n == -1 or len(data) < n:
            await self._fill_buffer()
            s = self.buffer.split(self.boundary, 1)
            data += s[0][:n] if n != -1 else s[0]
            self.buffer = s[0][n:] if n != -1 else b''
            if len(s) == 2:  # pragma: no branch
                # the end of this part is in the buffer
                if len(self.buffer) < 2:
                    # we have read all the way to the end of this part
                    data = data[:-(2 - len(self.buffer))]  # remove last "\r\n"
                self.buffer += self.boundary + s[1]
                return data
        return data
|
||||
|
||||
|
||||
class FileUpload:
    """Class that represents an uploaded file.

    :param filename: the name of the uploaded file.
    :param content_type: the content type of the uploaded file.
    :param read: a coroutine that reads from the uploaded file's stream.

    An uploaded file can be read from the stream using the :meth:`read()`
    method or saved to a file using the :meth:`save()` method.

    Instances of this class do not normally need to be created directly.
    """
    #: The size at which the file is copied to a temporary file.
    max_memory_size = 1024

    def __init__(self, filename, content_type, read):
        self.filename = filename
        self.content_type = content_type
        # coroutine that reads from the current part of the multipart body
        self._read = read
        # set by copy() when cleanup (temp file removal) is needed
        self._close = None

    async def read(self, n=-1):
        """Read up to ``n`` bytes from the uploaded file's stream.

        :param n: the maximum number of bytes to read. If ``n`` is -1 or not
                  given, the entire file is read.
        """
        return await self._read(n)

    async def save(self, path_or_file):
        """Save the uploaded file to the given path or file object.

        :param path_or_file: the path to save the file to, or a file object
                             to which the file is to be written.

        The file is read and written in chunks of size
        :attr:`FormDataIter.buffer_size`.
        """
        if isinstance(path_or_file, str):
            f = open(path_or_file, 'wb')
        else:
            f = path_or_file
        while True:
            data = await self.read(FormDataIter.buffer_size)
            if not data:
                break
            f.write(data)
        # only close the file if this method opened it
        if f != path_or_file:
            f.close()

    async def copy(self, max_memory_size=None):
        """Copy the uploaded file to a temporary file, to allow the parsing of
        the multipart form to continue.

        :param max_memory_size: the maximum size of the file to keep in memory.
                                If not given, then the class attribute of the
                                same name is used.
        """
        max_memory_size = max_memory_size or FileUpload.max_memory_size
        buffer = await self.read(max_memory_size)
        if len(buffer) < max_memory_size:
            # small file: keep it entirely in memory
            f = AsyncBytesIO(buffer)
            self._read = f.read
            return self

        # create a temporary file
        # NOTE(review): the file is created with a random name in the
        # current working directory, not a system temp dir — confirm this
        # is acceptable on the target platform.
        while True:
            tmpname = "".join([
                choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
                for _ in range(12)
            ])
            try:
                # mode "x" fails if the file already exists
                f = open(tmpname, 'x+b')
            except OSError as e:  # pragma: no cover
                if e.errno == 17:
                    # EEXIST
                    continue
                elif e.errno == 2:
                    # ENOENT
                    # some MicroPython platforms do not support mode "x"
                    f = open(tmpname, 'w+b')
                    # a non-empty file means the name was already taken
                    if f.read(1) != b'':
                        f.close()
                        continue
                else:
                    raise
            break
        # write what was already buffered, then stream the rest of the part
        f.write(buffer)
        await self.save(f)
        f.seek(0)

        # replace the reader so subsequent read() calls come from the
        # temporary file instead of the (now consumed) request stream
        async def read(n=-1):
            return f.read(n)

        async def close():
            f.close()
            os.remove(tmpname)

        self._read = read
        self._close = close
        return self

    async def close(self):
        """Close an open file.

        This method must be called to free memory or temporary files created by
        the ``copy()`` method.

        Note that when using the ``@with_form_data`` decorator this method is
        called automatically when the request ends.
        """
        if self._close:
            await self._close()
            # clear so a second close() is a no-op
            self._close = None
|
||||
|
||||
|
||||
def with_form_data(f):
    """Decorator that parses a ``multipart/form-data`` request body before
    the route handler runs, exposing the parsed form fields through
    ``request.form`` and the uploaded files through ``request.files``.

    Example usage::

        from microdot.multipart import with_form_data

        @app.post('/upload')
        @with_form_data
        async def upload(request):
            print('form fields:', request.form)
            print('files:', request.files)

    Each uploaded file is copied with :meth:`FileUpload.copy()
    <microdot.multipart.FileUpload.copy>` (to memory or to a temporary
    file, depending on its size) so the whole body can be parsed up
    front; any temporary files are deleted automatically when the
    request ends.
    """
    @wraps(f)
    async def wrapper(request, *args, **kwargs):
        parsed_form = {}
        parsed_files = {}
        async for field_name, field_value in FormDataIter(request):
            if isinstance(field_value, FileUpload):
                parsed_files[field_name] = await field_value.copy()
            else:
                parsed_form[field_name] = field_value
        # only attach parsed data when the body was actually multipart
        if parsed_form or parsed_files:
            request._form = parsed_form
            request._files = parsed_files
        try:
            result = f(request, *args, **kwargs)
            if iscoroutine(result):
                result = await result
        finally:
            # release memory / temporary files held by uploads
            uploaded = request.files
            if uploaded:
                for upload in uploaded.values():
                    await upload.close()
        return result
    return wrapper
|
||||
@@ -0,0 +1,155 @@
|
||||
import jwt
|
||||
from microdot.microdot import invoke_handler
|
||||
from microdot.helpers import wraps
|
||||
|
||||
|
||||
class SessionDict(dict):
    """A dictionary holding the data of a user session.

    Behaves exactly like a regular Python dictionary, with two extra
    convenience methods, :meth:`save` and :meth:`delete`, that persist
    or remove the session cookie.
    """
    def __init__(self, request, session_dict):
        super().__init__(session_dict)
        self.request = request

    def save(self):
        """Update the session cookie."""
        session_manager = self.request.app._session
        session_manager.update(self.request, self)

    def delete(self):
        """Delete the session cookie."""
        session_manager = self.request.app._session
        session_manager.delete(self.request)
|
||||
|
||||
|
||||
class Session:
    """Manages signed (JWT, HS256) user sessions stored in a cookie.

    :param app: The application instance.
    :param secret_key: The secret key, as a string or bytes object.
    :param cookie_options: A dictionary with options to pass to
                           ``set_cookie()``/``delete_cookie()`` when the
                           session cookie is written. ``path='/'`` and
                           ``http_only=True`` are added by default.
    """
    # class-level default; overridden per instance in __init__
    secret_key = None

    def __init__(self, app=None, secret_key=None, cookie_options=None):
        self.secret_key = secret_key
        self.cookie_options = cookie_options or {}
        if app is not None:
            self.initialize(app)

    def initialize(self, app, secret_key=None, cookie_options=None):
        """Attach this session manager to *app*, optionally overriding the
        secret key and cookie options given to the constructor."""
        if secret_key is not None:
            self.secret_key = secret_key
        if cookie_options is not None:
            self.cookie_options = cookie_options
        # apply cookie defaults without clobbering caller-provided options
        if 'path' not in self.cookie_options:
            self.cookie_options['path'] = '/'
        if 'http_only' not in self.cookie_options:
            self.cookie_options['http_only'] = True
        app._session = self

    def get(self, request):
        """Retrieve the user session.

        :param request: The client request.

        The return value is a session dictionary with the data stored in the
        user's session, or ``{}`` if the session data is not available or
        invalid.
        """
        if not self.secret_key:
            raise ValueError('The session secret key is not configured')
        # cache the decoded session on request.g so repeated calls within
        # one request decode the cookie only once
        if hasattr(request.g, '_session'):
            return request.g._session
        session = request.cookies.get('session')
        if session is None:
            request.g._session = SessionDict(request, {})
            return request.g._session
        request.g._session = SessionDict(request, self.decode(session))
        return request.g._session

    def update(self, request, session):
        """Update the user session.

        :param request: The client request.
        :param session: A dictionary with the update session data for the user.

        Applications would normally not call this method directly, instead they
        would use the :meth:`SessionDict.save` method on the session
        dictionary, which calls this method. For example::

            @app.route('/')
            @with_session
            def index(request, session):
                session['foo'] = 'bar'
                session.save()
                return 'Hello, World!'

        Calling this method adds a cookie with the updated session to the
        request currently being processed.
        """
        if not self.secret_key:
            raise ValueError('The session secret key is not configured')

        encoded_session = self.encode(session)

        # defer setting the cookie until the response is available
        @request.after_request
        def _update_session(request, response):
            response.set_cookie('session', encoded_session,
                                **self.cookie_options)
            return response

    def delete(self, request):
        """Remove the user session.

        :param request: The client request.

        Applications would normally not call this method directly, instead they
        would use the :meth:`SessionDict.delete` method on the session
        dictionary, which calls this method. For example::

            @app.route('/')
            @with_session
            def index(request, session):
                session.delete()
                return 'Hello, World!'

        Calling this method adds a cookie removal header to the request
        currently being processed.
        """
        @request.after_request
        def _delete_session(request, response):
            response.delete_cookie('session', **self.cookie_options)
            return response

    def encode(self, payload, secret_key=None):
        """Sign *payload* as a JWT using HS256 and return the token."""
        return jwt.encode(payload, secret_key or self.secret_key,
                          algorithm='HS256')

    def decode(self, session, secret_key=None):
        """Verify and decode a JWT session token.

        Returns the payload dictionary, or ``{}`` when the token is
        invalid, expired or signed with a different key.
        """
        try:
            payload = jwt.decode(session, secret_key or self.secret_key,
                                 algorithms=['HS256'])
        except jwt.exceptions.PyJWTError:  # pragma: no cover
            return {}
        return payload
|
||||
|
||||
|
||||
def with_session(f):
    """Decorator that injects the user session into the route handler.

    The session dictionary is passed to the decorated function as an
    extra argument right after the request object. Example::

        @app.route('/')
        @with_session
        def index(request, session):
            return 'Hello, World!'

    The decorator only reads the session; to persist changes, call
    :func:`session.save() <microdot.session.SessionDict.save>` inside
    the handler.
    """
    @wraps(f)
    async def wrapper(request, *args, **kwargs):
        session = request.app._session.get(request)
        return await invoke_handler(f, request, session, *args, **kwargs)

    return wrapper
|
||||
+126
@@ -0,0 +1,126 @@
|
||||
import asyncio
|
||||
from microdot.helpers import wraps
|
||||
|
||||
try:
|
||||
import orjson as json
|
||||
except ImportError:
|
||||
import json
|
||||
|
||||
|
||||
class SSE:
    """Holds the state of one Server-Sent Events connection.

    Handler functions receive an instance of this class and use its
    :meth:`send` method to push events to the client.
    """
    def __init__(self):
        # signals the streaming loop that new events are queued
        self.event = asyncio.Event()
        self.queue = []

    async def send(self, data, event=None, event_id=None):
        """Send an event to the client.

        :param data: the data to send. It can be given as a string, bytes, dict
                     or list. Dictionaries and lists are serialized to JSON.
                     Any other types are converted to string before sending.
        :param event: an optional event name, to send along with the data. If
                      given, it must be a string.
        :param event_id: an optional event id, to send along with the data. If
                         given, it must be a string.
        """
        if isinstance(data, (dict, list)):
            data = json.dumps(data)
        if isinstance(data, str):
            payload = data.encode()
        elif isinstance(data, bytes):
            payload = data
        else:
            payload = str(data).encode()
        # assemble the SSE wire format: optional "event:" and "id:"
        # lines, then the "data:" line and a blank-line terminator
        message = b'data: ' + payload + b'\n\n'
        if event_id:
            message = b'id: ' + event_id.encode() + b'\n' + message
        if event:
            message = b'event: ' + event.encode() + b'\n' + message
        self.queue.append(message)
        self.event.set()
|
||||
|
||||
|
||||
def sse_response(request, event_function, *args, **kwargs):
    """Return a response object that initiates an event stream.

    :param request: the request object.
    :param event_function: an asynchronous function that will send events to
                           the client. The function is invoked with ``request``
                           and an ``sse`` object. The function should use
                           ``sse.send()`` to send events to the client.
    :param args: additional positional arguments to be passed to the response.
    :param kwargs: additional keyword arguments to be passed to the response.

    This is a low-level function that can be used to implement a custom SSE
    endpoint. In general the :func:`microdot.sse.with_sse` decorator should be
    used instead.
    """
    sse = SSE()

    # run the application's event generator as a background task; events
    # it emits accumulate in sse.queue
    async def sse_task_wrapper():
        try:
            await event_function(request, sse, *args, **kwargs)
        except asyncio.CancelledError:  # pragma: no cover
            pass
        except Exception as exc:
            # the SSE task raised an exception so we need to pass it to the
            # main route so that it is re-raised there
            sse.queue.append(exc)
            sse.event.set()

    task = asyncio.create_task(sse_task_wrapper())

    # asynchronous iterator that the framework drives to stream queued
    # events out to the client as response chunks
    class sse_loop:
        def __aiter__(self):
            return self

        async def __anext__(self):
            event = None
            # keep going while there are queued events or the producer
            # task is still running
            while sse.queue or not task.done():
                try:
                    event = sse.queue.pop(0)
                    break
                except IndexError:
                    # queue is empty: wait until the task signals more
                    await sse.event.wait()
                    sse.event.clear()
            if isinstance(event, Exception):
                # if the event is an exception we re-raise it here so that it
                # can be handled appropriately
                raise event
            elif event is None:
                # producer finished and the queue drained: end the stream
                raise StopAsyncIteration
            return event

        async def aclose(self):
            # client disconnected: cancel the background event task
            task.cancel()

    return sse_loop(), 200, {'Content-Type': 'text/event-stream'}
|
||||
|
||||
|
||||
def with_sse(f):
    """Decorator that turns a route into a Server-Sent Events endpoint.

    The decorated handler receives an ``sse`` object as a second
    argument, and uses it to push events to the connected client::

        @app.route('/events')
        @with_sse
        async def events(request, sse):
            # send an unnamed event with string data
            await sse.send('hello')

            # send an unnamed event with JSON data
            await sse.send({'foo': 'bar'})

            # send a named event
            await sse.send('hello', event='greeting')
    """
    @wraps(f)
    async def wrapper(request, *args, **kwargs):
        return sse_response(request, f, *args, **kwargs)

    return wrapper
|
||||
@@ -0,0 +1,70 @@
|
||||
from utemplate import recompile
|
||||
|
||||
_loader = None
|
||||
|
||||
|
||||
class Template:
    """A template object.

    :param template: The filename of the template to render, relative to the
                     configured template directory.
    """
    @classmethod
    def initialize(cls, template_dir='templates',
                   loader_class=recompile.Loader):
        """Initialize the templating subsystem.

        :param template_dir: the directory where templates are stored. This
                             argument is optional. The default is to load
                             templates from a *templates* subdirectory.
        :param loader_class: the ``utemplate.Loader`` class to use when loading
                             templates. This argument is optional. The default
                             is the ``recompile.Loader`` class, which
                             automatically recompiles templates when they
                             change.
        """
        global _loader
        # a single module-level loader is shared by all Template instances
        _loader = loader_class(None, template_dir)

    def __init__(self, template):
        if _loader is None:  # pragma: no cover
            # lazily initialize the subsystem with defaults on first use
            self.initialize()
        #: The name of the template
        self.name = template
        # compiled render generator function provided by the loader
        self.template = _loader.load(template)

    def generate(self, *args, **kwargs):
        """Return a generator that renders the template in chunks, with the
        given arguments."""
        return self.template(*args, **kwargs)

    def render(self, *args, **kwargs):
        """Render the template with the given arguments and return it as a
        string."""
        return ''.join(self.generate(*args, **kwargs))

    def generate_async(self, *args, **kwargs):
        """Return an asynchronous generator that renders the template in
        chunks, using the given arguments."""
        # wrap the synchronous generator so it can be consumed with
        # ``async for``; chunks are still produced synchronously
        class sync_to_async_iter():
            def __init__(self, iter):
                self.iter = iter

            def __aiter__(self):
                return self

            async def __anext__(self):
                try:
                    return next(self.iter)
                except StopIteration:
                    # translate to the async-iteration end signal
                    raise StopAsyncIteration

        return sync_to_async_iter(self.generate(*args, **kwargs))

    async def render_async(self, *args, **kwargs):
        """Render the template with the given arguments asynchronously and
        return it as a string."""
        response = ''
        async for chunk in self.generate_async(*args, **kwargs):
            response += chunk
        return response
|
||||
Binary file not shown.
@@ -0,0 +1,36 @@
|
||||
{% args c %}
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="refresh" content="30" />
|
||||
<style type="text/css">
|
||||
.towards {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
|
||||
.message {
|
||||
border: 1px solid #cc0;
|
||||
background-color: #ff0;
|
||||
color: black;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<p>Last update: {{c["ura"]["gmtime"]}} GMT</p>
|
||||
{% for stop_id in c["stops"] %}
|
||||
{% set s = c["stops"][stop_id] %}
|
||||
<h1>{{s["name"]}}</h1>
|
||||
<div class="towards">(Towards: <b>{{s["towards"]}}</b>)</div>
|
||||
{% for m in s["messages"] %}
|
||||
<p class="message">{{m}}</p>
|
||||
{% endfor %}
|
||||
{% for line_name in s["lines"] %}
|
||||
<h3>{{line_name}}</h3>
|
||||
<ol>
|
||||
{% for pred in s["lines"][line_name] %}
|
||||
<li>{{pred["est_due"]}} ➡ {{pred["destination"]}}
|
||||
{% endfor %}
|
||||
</ol>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,149 @@
|
||||
import json
|
||||
import time
|
||||
import urequests
|
||||
|
||||
class TflCountdown:
    """Client for the TfL "Countdown" live bus arrivals (URA) API.

    Fetches arrival predictions from the URA "instant" endpoint and
    parses the line-delimited JSON response into a nested dictionary
    keyed by stop.

    API documentation:
    https://content.tfl.gov.uk/tfl-live-bus-river-bus-arrivals-api-documentation.pdf
    """

    API_URL = "https://countdown.api.tfl.gov.uk/interfaces/ura/instant_V1"
    # Full field layout of each URA ResponseType, in the order the API
    # returns them when every field is requested.
    FIELD_NAMES = [
        # ResponseType 0 - Stop
        ["ResponseType", "StopPointName", "StopID", "StopCode1", "StopCode2",
         "StopPointType", "Towards", "Bearing", "StopPointIndicator",
         "StopPointState", "Latitude", "Longitude"],
        # ResponseType 1 - Prediction
        ["ResponseType", "StopPointName", "StopID", "StopCode1", "StopCode2",
         "StopPointType", "Towards", "Bearing", "StopPointIndicator",
         "StopPointState", "Latitude", "Longitude", "VisitNumber", "LineID",
         "LineName", "DirectionID", "DestinationText", "DestinationName",
         "VehicleID", "TripID", "RegistrationNumber", "EstimatedTime",
         "ExpireTime"],
        # ResponseType 2 - Flexible Message
        ["ResponseType", "StopPointName", "StopID", "StopCode1", "StopCode2",
         "StopPointType", "Towards", "Bearing", "StopPointIndicator",
         "StopPointState", "Latitude", "Longitude", "MessageUUID",
         "MessageType", "MessagePriority", "MessageText", "StartTime",
         "ExpireTime"],
        # ResponseType 3 - Baseversion
        ["ResponseType", "Version"],
        # ResponseType 4 - URA Version
        ["ResponseType", "Version", "TimeStamp"]
    ]

    def __init__(self, api_key: str):
        """
        :param api_key: API key (currently not required by the service).
        """
        self.api_key = api_key
        # Fields requested from the API; also drives get_field() lookups.
        self.return_list = ["StopPointName", "StopID", "Towards", "LineName",
                            "DestinationText", "EstimatedTime", "MessageText"]
        self.stop_ids = ["1597", "1598", "11333", "11334"]
        # Reference "now" in milliseconds since the epoch (GMT), replaced
        # by the server timestamp when a response is parsed, so devices
        # without an RTC still get correct relative times.
        # (Bug fix: was `time.gmtime() * 1000`, which repeats a
        # struct_time tuple 1000 times instead of producing a number.)
        self.time_now = int(time.time() * 1000)

    def get_countdown(self, stop_ids: list = None, line_ids: list = None):
        """Request live arrivals for the given stops and lines.

        :param stop_ids: stop IDs to query (defaults to ["1598", "11333"]).
        :param line_ids: line IDs to filter on (defaults to all lines).
        :returns: the ``urequests`` response object.
        """
        # None-sentinel instead of mutable default arguments
        if stop_ids is None:
            stop_ids = ["1598", "11333"]
        if line_ids is None:
            line_ids = []
        params = {
            "StopAlso": "true",
            "ReturnList": ",".join(self.return_list),
            "StopID": ",".join(stop_ids),
            "LineID": ",".join(line_ids)
        }
        url = self.API_URL + self.get_query(params)
        return urequests.get(url)

    def get_query(self, params: dict):
        """Build a query string from *params*, skipping empty values.

        Note: values are not URL-encoded; callers must pass URL-safe
        values (numeric stop/line IDs are).
        """
        parts = [f"{k}={v}" for k, v in params.items() if v != ""]
        return "?" + "&".join(parts)

    def get_field(self, msg: list, field_name: str):
        """Return the value of *field_name* from a parsed URA record.

        :param msg: one decoded response line (a list; element 0 is the
                    ResponseType).
        :returns: the field value, or ``None`` when the record type is
                  unknown or the field is not present.
        """
        resp_type = msg[0]
        try:
            full_fields = self.FIELD_NAMES[resp_type]
        except (IndexError, TypeError):
            print(f"Unknown ResponseType: {resp_type}")
            return None
        if resp_type == 4:
            # URA Version records always contain all of their fields
            return_fields = full_fields
        else:
            # Other records contain ResponseType plus only the fields
            # requested via ReturnList, in layout order.
            return_fields = ["ResponseType"] + [
                f for f in self.return_list if f in full_fields]
        try:
            return msg[return_fields.index(field_name)]
        except (ValueError, IndexError):
            return None

    def strftime(self, time_tuple: tuple, date: bool = True,
                 time: bool = True):
        """Format a time tuple as ``YYYY-MM-DD HH:MM:SS``.

        :param date: include the date part.
        :param time: include the time part. (This parameter shadows the
                     ``time`` module inside this method; it is only used
                     as a flag here.)
        """
        parts = []
        if date:
            parts.append(
                f"{time_tuple[0]}-{time_tuple[1]:02}-{time_tuple[2]:02}")
        if time:
            parts.append(
                f"{time_tuple[3]:02}:{time_tuple[4]:02}:{time_tuple[5]:02}")
        return " ".join(parts)

    def strfstamp(self, gmstamp_ms: int, return_date: bool = True,
                  return_time: bool = True):
        """Format a GMT millisecond timestamp as a date/time string."""
        # Integer division: time.gmtime() on MicroPython requires an int.
        return self.strftime(time.gmtime(gmstamp_ms // 1000),
                             return_date, return_time)

    def get_due(self, gmstamp_ms: int):
        """Return minutes until *gmstamp_ms*, relative to the reference
        time, as a string like ``"5min"``."""
        return f"{round((gmstamp_ms - self.time_now) / 60000)}min"

    def _get_stop(self, result: dict, stop_id: str):
        """Return the stop entry for *stop_id*, creating a stub when the
        stop record has not been seen (e.g. StopAlso not requested)."""
        return result["stops"].setdefault(stop_id, {
            "id": stop_id,
            "name": None,
            "towards": None,
            "lines": {},
            "messages": []
        })

    def parse_countdown(self, ctdn_response: str):
        """Parse a URA "instant" response into a nested dictionary.

        https://content.tfl.gov.uk/tfl-live-bus-river-bus-arrivals-api-documentation.pdf

        Optimised for a value of:
        "ReturnList": "StopPointName,StopID,LineName,DestinationText,EstimatedTime,MessageText"

        :param ctdn_response: the raw response body (one JSON array per
                              CRLF-separated line).
        :returns: a dict with a "stops" mapping (always present) and,
                  when the response contains one, a "ura" entry with the
                  server version and timestamp.
        """
        # "stops" is always created so the sort below cannot KeyError on
        # responses without stop records.
        result = {"stops": {}}
        for line in ctdn_response.split("\r\n"):
            if not line:
                continue  # tolerate trailing blank lines
            record = json.loads(line)
            resp_type = record[0]
            if resp_type == 0:
                # Stop record
                stop_id = self.get_field(record, "StopID")
                result["stops"][stop_id] = {
                    "id": stop_id,
                    "name": self.get_field(record, "StopPointName"),
                    "towards": self.get_field(record, "Towards"),
                    "lines": {},
                    "messages": []
                }
            elif resp_type == 1:
                # Prediction record
                stop_id = self.get_field(record, "StopID")
                line_no = self.get_field(record, "LineName")
                est_stamp = int(self.get_field(record, "EstimatedTime"))
                stop = self._get_stop(result, stop_id)
                stop["lines"].setdefault(line_no, []).append({
                    "destination": self.get_field(record, "DestinationText"),
                    "est_gmstamp_ms": est_stamp,
                    "est_gmtime": self.strfstamp(est_stamp),
                    "est_due": self.get_due(est_stamp)
                })
            elif resp_type == 2:
                # Flexible Message record
                stop_id = self.get_field(record, "StopID")
                msg = self.get_field(record, "MessageText")
                self._get_stop(result, stop_id)["messages"].append(msg)
            elif resp_type == 3:
                # Baseversion record: not used
                pass
            elif resp_type == 4:
                # URA Version record
                ura_stamp = int(self.get_field(record, "TimeStamp"))
                # Use as reference time - perfect for embedded systems
                # without an RTC.
                self.time_now = ura_stamp
                result["ura"] = {
                    "version": self.get_field(record, "Version"),
                    "gmstamp_ms": ura_stamp,
                    "gmtime": self.strfstamp(ura_stamp)
                }
            else:
                print(f"Unsupported ResponseType: {resp_type}")

        # Sort each line's predictions by estimated arrival time
        for stop in result["stops"].values():
            for line_no in stop["lines"]:
                stop["lines"][line_no].sort(
                    key=lambda p: p["est_gmstamp_ms"])

        return result
|
||||
Binary file not shown.
@@ -0,0 +1,14 @@
|
||||
class Loader:
    """Loads pre-compiled templates by importing them as Python modules."""

    def __init__(self, pkg, dir):
        # Build the dotted module prefix for compiled template modules.
        if dir == ".":
            prefix = ""
        else:
            prefix = dir.replace("/", ".") + "."
        # Qualify with the owning package, unless loading from a script.
        if pkg and pkg != "__main__":
            prefix = pkg + "." + prefix
        self.p = prefix

    def load(self, name):
        """Import the compiled module for *name* and return its render
        generator function."""
        module_name = name.replace(".", "_")
        module = __import__(self.p + module_name, None, None, (module_name,))
        return module.render
|
||||
@@ -0,0 +1,21 @@
|
||||
# (c) 2014-2020 Paul Sokolovsky. MIT license.
|
||||
try:
|
||||
from uos import stat, remove
|
||||
except:
|
||||
from os import stat, remove
|
||||
from . import source
|
||||
|
||||
|
||||
class Loader(source.Loader):
    """Template loader that recompiles a template when its source file is
    newer than the compiled module.

    ``pkg_path``, ``dir`` and ``compiled_path()`` are provided by the
    ``source.Loader`` base class.
    """

    def load(self, name):
        # o_path: compiled output module; i_path: template source file
        o_path = self.pkg_path + self.compiled_path(name)
        i_path = self.pkg_path + self.dir + "/" + name
        try:
            o_stat = stat(o_path)
            i_stat = stat(i_path)
            # index 8 is st_mtime in the stat tuple
            if i_stat[8] > o_stat[8]:
                # input file is newer, remove output to force recompile
                remove(o_path)
        finally:
            # NOTE: returning from a finally block swallows any exception
            # raised above (e.g. when the compiled file does not exist
            # yet) — presumably intentional best-effort behavior.
            return super().load(name)
|
||||
@@ -0,0 +1,188 @@
|
||||
# (c) 2014-2019 Paul Sokolovsky. MIT license.
|
||||
from . import compiled
|
||||
|
||||
|
||||
class Compiler:
    """Compiles a utemplate text template into a Python generator function.

    Reads template lines from ``file_in`` and writes Python source to
    ``file_out``: literal text becomes ``yield \"\"\"...\"\"\"`` chunks,
    ``{{ expr }}`` becomes ``yield str(expr)``, and ``{% ... %}``
    statements become Python control flow.  ``seq`` numbers nested
    ``render`` functions produced for ``{% include %}`` directives.
    """

    # Delimiters: statements are "{% ... %}", expressions are "{{ ... }}".
    START_CHAR = "{"
    STMNT = "%"
    STMNT_END = "%}"
    EXPR = "{"
    EXPR_END = "}}"

    def __init__(self, file_in, file_out, indent=0, seq=0, loader=None):
        # file_in: iterable of template lines; file_out: writable text file.
        self.file_in = file_in
        self.file_out = file_out
        # Loader used to open included templates (only needed for includes).
        self.loader = loader
        # Sequence number for this render function (0 = top-level "render").
        self.seq = seq
        # Extra indentation for compiled code of included templates.
        self._indent = indent
        # Stack of open blocks ("def", "if", "for", ...) for indent depth
        # and for validating {% end... %} / {% else %} / {% elif %}.
        self.stack = []
        # True while we are inside an open yield """...""" literal.
        self.in_literal = False
        # Becomes True once the "def render...(...):" header was emitted.
        self.flushed_header = False
        # Signature of the generated render function; overridden by {% args %}.
        self.args = "*a, **d"

    def indent(self, adjust=0):
        """Write indentation for the next output line, emitting the
        function header first if it hasn't been written yet."""
        if not self.flushed_header:
            self.flushed_header = True
            self.indent()
            self.file_out.write("def render%s(%s):\n" % (str(self.seq) if self.seq else "", self.args))
            # "def" on the stack adds one indent level for the function body.
            self.stack.append("def")
        self.file_out.write(" " * (len(self.stack) + self._indent + adjust))

    def literal(self, s):
        """Append literal template text to the current yield-string,
        opening one if necessary."""
        if not s:
            return
        if not self.in_literal:
            self.indent()
            self.file_out.write('yield """')
            self.in_literal = True
        # Escape double quotes so the triple-quoted literal stays valid.
        self.file_out.write(s.replace('"', '\\"'))

    def close_literal(self):
        """Terminate an open yield-string, if any."""
        if self.in_literal:
            self.file_out.write('"""\n')
        self.in_literal = False

    def render_expr(self, e):
        """Emit code yielding the str() of a template expression."""
        self.indent()
        self.file_out.write('yield str(' + e + ')\n')

    def parse_statement(self, stmt):
        """Translate one "{% ... %}" statement into generated Python code.

        Handles the template directives args/set/include, block-opening
        statements (if/for/while/...), and end/else/elif.  Raises
        AssertionError on mismatched block structure.
        """
        tokens = stmt.split(None, 1)
        if tokens[0] == "args":
            # {% args a, b %} declares the render function's signature.
            if len(tokens) > 1:
                self.args = tokens[1]
            else:
                self.args = ""
        elif tokens[0] == "set":
            # {% set x = ... %} becomes a plain assignment statement.
            self.indent()
            self.file_out.write(stmt[3:].strip() + "\n")
        elif tokens[0] == "include":
            if not self.flushed_header:
                # If there was no other output, we still need a header now
                self.indent()
            tokens = tokens[1].split(None, 1)
            args = ""
            if len(tokens) > 1:
                args = tokens[1]
            if tokens[0][0] == "{":
                # Dynamic include: template name is a runtime expression,
                # resolved by importing the compiled module at render time.
                self.indent()
                # "1" as fromlist param is uPy hack
                self.file_out.write('_ = __import__(%s.replace(".", "_"), None, None, 1)\n' % tokens[0][2:-2])
                self.indent()
                self.file_out.write("yield from _.render(%s)\n" % args)
                return

            # Static include: compile the included template inline as a
            # numbered renderN() function, then delegate to it.
            with self.loader.input_open(tokens[0][1:-1]) as inc:
                self.seq += 1
                c = Compiler(inc, self.file_out, len(self.stack) + self._indent, self.seq)
                inc_id = self.seq
                self.seq = c.compile()
            self.indent()
            self.file_out.write("yield from render%d(%s)\n" % (inc_id, args))
        elif len(tokens) > 1:
            # Block-opening statement with an argument: if/for/while/...
            if tokens[0] == "elif":
                assert self.stack[-1] == "if"
                # elif is emitted one level out, aligned with its "if".
                self.indent(-1)
                self.file_out.write(stmt + ":\n")
            else:
                self.indent()
                self.file_out.write(stmt + ":\n")
                self.stack.append(tokens[0])
        else:
            if stmt.startswith("end"):
                # {% endif %} / {% endfor %} ... must match the open block.
                assert self.stack[-1] == stmt[3:]
                self.stack.pop(-1)
            elif stmt == "else":
                assert self.stack[-1] == "if"
                self.indent(-1)
                self.file_out.write("else:\n")
            else:
                # Unknown single-token statement: template is malformed.
                assert False

    def parse_line(self, l):
        """Scan one template line, splitting it into literal text,
        expressions and statements."""
        while l:
            start = l.find(self.START_CHAR)
            if start == -1:
                # No more delimiters: the rest of the line is literal text.
                self.literal(l)
                return
            self.literal(l[:start])
            self.close_literal()
            # Character after "{" selects statement ("%") vs expression ("{").
            sel = l[start + 1]
            #print("*%s=%s=" % (sel, EXPR))
            if sel == self.STMNT:
                end = l.find(self.STMNT_END)
                assert end > 0
                stmt = l[start + len(self.START_CHAR + self.STMNT):end].strip()
                self.parse_statement(stmt)
                end += len(self.STMNT_END)
                l = l[end:]
                # Swallow the trailing newline of a statement-only line so
                # it doesn't show up as literal output.
                if not self.in_literal and l == "\n":
                    break
            elif sel == self.EXPR:
                # print("EXPR")
                end = l.find(self.EXPR_END)
                assert end > 0
                expr = l[start + len(self.START_CHAR + self.EXPR):end].strip()
                self.render_expr(expr)
                end += len(self.EXPR_END)
                l = l[end:]
            else:
                # Lone "{" not starting a delimiter: emit it literally.
                self.literal(l[start])
                l = l[start + 1:]

    def header(self):
        """Write the generated file's banner comment."""
        self.file_out.write("# Autogenerated file\n")

    def compile(self):
        """Compile the whole input template; returns the last used
        sequence number (grows when includes are compiled inline)."""
        self.header()
        for l in self.file_in:
            self.parse_line(l)
        self.close_literal()
        return self.seq
|
||||
|
||||
|
||||
class Loader(compiled.Loader):
    """Source-template loader: compiles templates on demand.

    Tries to import an already-compiled template module first; when that
    fails, compiles the template source to a ``.py`` file next to it and
    imports the result.
    """

    def __init__(self, pkg, dir):
        super().__init__(pkg, dir)
        self.dir = dir
        if pkg == "__main__":
            # if pkg isn't really a package, don't bother to use it -
            # we're running from a filesystem directory, not a package.
            pkg = None

        self.pkg_path = ""
        if pkg:
            mod = __import__(pkg)
            path = mod.__path__
            # MicroPython exposes __path__ as a plain string; CPython as a list.
            self.pkg_path = path if isinstance(path, str) else path[0]
            self.pkg_path += "/"

    def input_open(self, template):
        """Open the raw template source file for reading."""
        return open(self.pkg_path + self.dir + "/" + template)

    def compiled_path(self, template):
        """Relative path of the compiled module for *template*
        (e.g. "index.html" -> "<dir>/index_html.py")."""
        return self.dir + "/" + template.replace(".", "_") + ".py"

    def load(self, name):
        """Return the render() function for *name*, compiling if needed."""
        try:
            # Fast path: a compiled module already exists and imports.
            return super().load(name)
        except (OSError, ImportError):
            pass

        out_path = self.pkg_path + self.compiled_path(name)

        src = self.input_open(name)
        dst = open(out_path, "w")
        Compiler(src, dst, loader=self).compile()
        src.close()
        dst.close()
        return super().load(name)
|
||||
Reference in New Issue
Block a user