Posted to commits@ponymail.apache.org by hu...@apache.org on 2020/09/06 17:34:48 UTC

[incubator-ponymail-foal] 03/15: Add minimal server package for UI backend

This is an automated email from the ASF dual-hosted git repository.

humbedooh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-ponymail-foal.git

commit bcd7e32dab9735255a25ed9456ac761e095f1c86
Author: Daniel Gruno <hu...@apache.org>
AuthorDate: Sun Sep 6 19:13:29 2020 +0200

    Add minimal server package for UI backend
    
    This doesn't have any URL endpoints yet.
    This is very much a work in progress.
---
 server/main.py                  | 175 ++++++++++++++++++++++++++++++++++
 server/plugins/__init__.py      |   1 +
 server/plugins/aaa.py           |  29 ++++++
 server/plugins/background.py    | 204 ++++++++++++++++++++++++++++++++++++++++
 server/plugins/configuration.py |  57 +++++++++++
 server/plugins/database.py      |  73 ++++++++++++++
 server/plugins/formdata.py      |  65 +++++++++++++
 server/plugins/offloader.py     |  43 +++++++++
 server/plugins/server.py        |  25 +++++
 server/plugins/session.py       | 111 ++++++++++++++++++++++
 server/ponymail.yaml.example    |  15 +++
 11 files changed, 798 insertions(+)

diff --git a/server/main.py b/server/main.py
new file mode 100644
index 0000000..cec6dad
--- /dev/null
+++ b/server/main.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Apache Pony Mail, Codename Foal - A Python variant of Pony Mail"""
+import argparse
+import asyncio
+import importlib
+import json
+import os
+import sys
+import traceback
+
+import aiohttp.web
+import yaml
+
+import plugins.background
+import plugins.configuration
+import plugins.database
+import plugins.formdata
+import plugins.offloader
+import plugins.server
+import plugins.session
+
+PONYMAIL_FOAL_VERSION = "0.1.0"
+
+
+class Server(plugins.server.BaseServer):
+    """Main server class, responsible for handling requests and scheduling offloader threads """
+
+    def __init__(self, args: argparse.Namespace):
+        print(
+            "==== Apache Pony Mail (Foal v/%s) starting... ====" % PONYMAIL_FOAL_VERSION
+        )
+        # Load configuration
+        yml = yaml.safe_load(open(args.config))
+        self.config = plugins.configuration.Configuration(yml)
+        self.data = plugins.configuration.InterData()
+        self.handlers = dict()
+        self.dbpool = asyncio.Queue()
+        self.runners = plugins.offloader.ExecutorPool(threads=10)
+        self.server = None
+
+        # Make a pool of 15 database connections for async queries
+        for _ in range(15):
+            self.dbpool.put_nowait(plugins.database.Database(self.config.database))
+
+        # Load each URL endpoint
+        for endpoint_file in os.listdir("endpoints"):
+            if endpoint_file.endswith(".py"):
+                endpoint = endpoint_file[:-3]
+                m = importlib.import_module(f"endpoints.{endpoint}")
+                if hasattr(m, "register"):
+                    self.handlers[endpoint] = m.__getattribute__("register")(self)
+                    print(f"Registered endpoint /api/{endpoint}")
+                else:
+                    print(
+                        f"Could not find entry point 'register()' in {endpoint_file}, skipping!"
+                    )
+
+    async def handle_request(
+        self, request: aiohttp.web.BaseRequest
+    ) -> aiohttp.web.Response:
+        """Generic handler for all incoming HTTP requests"""
+        resp: aiohttp.web.Response
+
+        # Define response headers first...
+        headers = {
+            "Server": "PyPony/%s" % PONYMAIL_FOAL_VERSION,
+        }
+
+        # Figure out who is going to handle this request, if any
+        # We are backwards compatible with the old Lua interface URLs
+        body_type = "form"
+        handler = request.path.split("/")[-1]
+        if handler.endswith(".lua"):
+            body_type = "form"
+            handler = handler[:-4]
+        if handler.endswith(".json"):
+            body_type = "json"
+            handler = handler[:-5]
+
+        # Parse form data if any
+        try:
+            indata = await plugins.formdata.parse_formdata(body_type, request)
+        except ValueError as e:
+            return aiohttp.web.Response(headers=headers, status=400, text=str(e))
+
+        # Find a handler, or 404
+        if handler in self.handlers:
+            session = await plugins.session.get_session(self, request)
+            try:
+                # Wait for endpoint response. This is typically JSON in case of success,
+                # but could be an exception (that needs a traceback) OR
+                # it could be a custom response, which we just pass along to the client.
+                output = await self.handlers[handler].exec(self, session, indata)
+                if session.database:
+                    self.dbpool.put_nowait(session.database)
+                    self.dbpool.task_done()
+                    session.database = None
+                headers["content-type"] = "application/json"
+                if output and not isinstance(output, aiohttp.web.Response):
+                    jsout = await self.runners.run(json.dumps, output, indent=2)
+                    headers["Content-Length"] = str(len(jsout))
+                    return aiohttp.web.Response(headers=headers, status=200, text=jsout)
+                elif isinstance(output, aiohttp.web.Response):
+                    return output
+                else:
+                    return aiohttp.web.Response(
+                        headers=headers, status=404, text="Content not found"
+                    )
+            except Exception:
+                if session.database:
+                    self.dbpool.put_nowait(session.database)
+                    self.dbpool.task_done()
+                    session.database = None
+                exc_type, exc_value, exc_traceback = sys.exc_info()
+                err = "\n".join(
+                    traceback.format_exception(exc_type, exc_value, exc_traceback)
+                )
+                return aiohttp.web.Response(
+                    headers=headers, status=500, text="API error occurred: " + err
+                )
+        else:
+            return aiohttp.web.Response(
+                headers=headers, status=404, text="API Endpoint not found!"
+            )
+
+    async def server_loop(self, loop: asyncio.AbstractEventLoop):  # Note, loop never used.
+        self.server = aiohttp.web.Server(self.handle_request)
+        runner = aiohttp.web.ServerRunner(self.server)
+        await runner.setup()
+        site = aiohttp.web.TCPSite(
+            runner, self.config.server.ip, self.config.server.port
+        )
+        await site.start()
+        print(
+            "==== Serving up Pony goodness at %s:%s ===="
+            % (self.config.server.ip, self.config.server.port)
+        )
+        await plugins.background.run_tasks(self)
+
+    def run(self):
+        loop = asyncio.get_event_loop()
+        try:
+            loop.run_until_complete(self.server_loop(loop))
+        except KeyboardInterrupt:
+            pass
+        loop.close()
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--config",
+        help="Configuration file to load (default: ponymail.yaml)",
+        default="ponymail.yaml",
+    )
+    cliargs = parser.parse_args()
+    pubsub_server = Server(cliargs)
+    pubsub_server.run()
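
Note: main.py above expects each module in an endpoints/ directory to expose a
register() entry point returning a plugins.server.Endpoint, whose exec coroutine
is awaited as exec(server, session, indata). A minimal sketch of such a module
(file name and handler body are illustrative, not part of this commit):

    # endpoints/hello.py, illustrative sketch only
    import plugins.server
    import plugins.session


    async def process(
        server: plugins.server.BaseServer,
        session: plugins.session.SessionObject,
        indata: dict,
    ) -> dict:
        # Any JSON-serializable return value is serialized and sent by main.py
        return {"pony": True, "uid": session.credentials.uid or None}


    def register(server: plugins.server.BaseServer) -> plugins.server.Endpoint:
        return plugins.server.Endpoint(process)

With such a module in place, /api/hello, the Lua-compatible /api/hello.lua, and
/api/hello.json (JSON request body) would all route to the same handler.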
diff --git a/server/plugins/__init__.py b/server/plugins/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/server/plugins/__init__.py
@@ -0,0 +1 @@
+
diff --git a/server/plugins/aaa.py b/server/plugins/aaa.py
new file mode 100644
index 0000000..d2722a2
--- /dev/null
+++ b/server/plugins/aaa.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This is the AAA library for Pony Mail codename Foal
+It handles rights management for lists.
+"""
+
+
+def can_access_email(session, email):
+    return True
+
+
+def can_access_list(session, listid):
+    return False
diff --git a/server/plugins/background.py b/server/plugins/background.py
new file mode 100644
index 0000000..fdd6d97
--- /dev/null
+++ b/server/plugins/background.py
@@ -0,0 +1,204 @@
+import asyncio
+import datetime
+import re
+import sys
+import time
+
+from elasticsearch import AsyncElasticsearch
+from elasticsearch.helpers import async_scan
+from elasticsearch_dsl import Search
+
+import plugins.configuration
+import plugins.server
+
+PYPONY_RE_PREFIX = re.compile(r"^([a-zA-Z]+:\s*)+")
+
+
+class ProgTimer:
+    start: float
+    title: str
+
+    def __init__(self, title):
+        self.title = title
+
+    async def __aenter__(self):
+        sys.stdout.write(
+            "[%s] %s..." % (datetime.datetime.now().strftime("%H:%M:%S"), self.title)
+        )
+        self.start = time.time()
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        print("Done in %.2f seconds" % (time.time() - self.start))
+
+
+async def get_lists(database: plugins.configuration.DBConfig) -> dict:
+    """
+    Fetches all mailing lists in the archive, along with message counts and privacy status.
+    :param database: a Pony Mail database configuration
+    :return: A dictionary of all mailing lists found, and whether they are considered
+             public or private
+    """
+    lists = {}
+    client = AsyncElasticsearch(
+        [
+            {
+                "host": database.hostname,
+                "port": database.port,
+                "url_prefix": database.db_prefix,
+                "use_ssl": database.secure,
+            },
+        ]
+    )
+
+    # Fetch aggregations of all public emails
+    s = Search(using=client, index=database.db_prefix + "-mbox").query(
+        "match", private=False
+    )
+    s.aggs.bucket("per_list", "terms", field="list_raw")
+
+    res = await client.search(
+        index=database.db_prefix + "-mbox", body=s.to_dict(), size=0
+    )
+
+    for ml in res["aggregations"]["per_list"]["buckets"]:
+        list_name = ml["key"].strip("<>").replace(".", "@", 1)
+        lists[list_name] = {
+            "count": ml["doc_count"],
+            "private": False,
+        }
+
+    # Ditto, for private emails
+    s = Search(using=client, index=database.db_prefix + "-mbox").query(
+        "match", private=True
+    )
+    s.aggs.bucket("per_list", "terms", field="list_raw")
+
+    res = await client.search(
+        index=database.db_prefix + "-mbox", body=s.to_dict(), size=0
+    )
+
+    for ml in res["aggregations"]["per_list"]["buckets"]:
+        list_name = ml["key"].strip("<>").replace(".", "@", 1)
+        lists[list_name] = {
+            "count": ml["doc_count"],
+            "private": True,
+        }
+    await client.close()
+
+    return lists
+
+
+async def get_public_activity(database: plugins.configuration.DBConfig) -> dict:
+    """
+    Gathers activity statistics for public email over the past 14 days.
+    :param database: a Pony Mail database configuration
+    :return: A dictionary with activity stats
+    """
+    client = AsyncElasticsearch(
+        [
+            {
+                "host": database.hostname,
+                "port": database.port,
+                "url_prefix": database.db_prefix,
+                "use_ssl": database.secure,
+            },
+        ]
+    )
+
+    # Fetch aggregations of all public emails
+    s = (
+        Search(using=client, index=database.db_prefix + "-mbox")
+        .query("match", private=False)
+        .filter("range", date={"lt": "now+1d", "gt": "now-14d"})
+    )
+
+    s.aggs.bucket("number_of_lists", "cardinality", field="list_raw")
+    s.aggs.bucket("number_of_senders", "cardinality", field="from_raw")
+    s.aggs.bucket(
+        "daily_emails", "date_histogram", field="date", calendar_interval="1d"
+    )
+
+    res = await client.search(
+        index=database.db_prefix + "-mbox", body=s.to_dict(), size=0
+    )
+
+    no_emails = res["hits"]["total"]["value"]
+    no_lists = res["aggregations"]["number_of_lists"]["value"]
+    no_senders = res["aggregations"]["number_of_senders"]["value"]
+    daily_emails = []
+    for entry in res["aggregations"]["daily_emails"]["buckets"]:
+        daily_emails.append((entry["key"], entry["doc_count"]))
+
+    # Now the nitty gritty thread count
+    seen_emails = {}
+    seen_topics = []
+    thread_count = 0
+
+    s = (
+        Search(using=client, index=database.db_prefix + "-mbox")
+        .query("match", private=False)
+        .filter("range", date={"lt": "now+1d", "gt": "now-14d"})
+    )
+    async for doc in async_scan(
+        index=database.db_prefix + "-mbox",
+        client=client,
+        query=s.to_dict(),
+        _source_includes=[
+            "message-id",
+            "in-reply-to",
+            "subject",
+            "references",
+            "epoch",
+            "list_raw",
+        ],
+    ):
+
+        found = False
+        message_id = doc["_source"].get("message-id")
+        irt = doc["_source"].get("in-reply-to")
+        references = doc["_source"].get("references")
+        list_raw = doc["_source"].get("list_raw", "_")
+        subject = doc["_source"].get("subject", "_")
+        if irt and irt in seen_emails:
+            seen_emails[message_id] = irt
+            found = True
+        elif references:
+            for refid in re.split(r"\s+", references):
+                if refid in seen_emails:
+                    seen_emails[message_id] = refid
+                    found = True
+        if not found:
+            subject = PYPONY_RE_PREFIX.sub("", subject)
+            subject += list_raw
+            if subject in seen_topics:
+                seen_emails[message_id] = subject
+            else:
+                seen_topics.append(subject)
+                thread_count += 1
+
+    await client.close()
+
+    activity = {
+        "hits": no_emails,
+        "no_threads": thread_count,
+        "no_active_lists": no_lists,
+        "participants": no_senders,
+        "activity": daily_emails,
+    }
+
+    return activity
+
+
+async def run_tasks(server: plugins.server.BaseServer):
+    """
+        Runs long-lived background tasks that gather statistics about email activity and the
+        list of archived mailing lists, used to populate the Pony Mail main index.
+
+        Generally runs every 2½ minutes, or whatever is set in tasks/refresh_rate in ponymail.yaml
+    """
+    while True:
+        async with ProgTimer("Gathering list of archived mailing lists"):
+            server.data.lists = await get_lists(server.config.database)
+        async with ProgTimer("Gathering bi-weekly activity stats"):
+            server.data.activity = await get_public_activity(server.config.database)
+        await asyncio.sleep(server.config.tasks.refresh_rate)
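
Note: once run_tasks() has completed a pass, server.data.lists and
server.data.activity hold plain dictionaries for the UI front page to consume.
An illustrative sketch of their shape (list names and numbers are made up):

    # Shapes gathered by get_lists() and get_public_activity(); values are invented
    example_lists = {
        "dev@ponymail.apache.org": {"count": 1234, "private": False},
        "security@ponymail.apache.org": {"count": 56, "private": True},
    }
    example_activity = {
        "hits": 1290,             # public emails in the past 14 days
        "no_threads": 310,        # distinct discussion threads
        "no_active_lists": 2,     # lists with recent traffic
        "participants": 87,       # distinct senders
        "activity": [(1599264000000, 41), (1599350400000, 38)],  # (epoch ms, emails that day)
    }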
diff --git a/server/plugins/configuration.py b/server/plugins/configuration.py
new file mode 100644
index 0000000..65e5db4
--- /dev/null
+++ b/server/plugins/configuration.py
@@ -0,0 +1,57 @@
+class ServerConfig:
+    port: int
+    ip: str
+
+    def __init__(self, subyaml: dict):
+        self.ip = subyaml.get("bind", "0.0.0.0")
+        self.port = int(subyaml.get("port", 8080))
+
+
+class TaskConfig:
+    refresh_rate: int
+
+    def __init__(self, subyaml: dict):
+        self.refresh_rate = int(subyaml.get("refresh_rate", 150))
+
+
+class DBConfig:
+    hostname: str
+    port: int
+    secure: bool
+    url_prefix: str
+    db_prefix: str
+    max_hits: int
+
+    def __init__(self, subyaml: dict):
+        self.hostname = str(subyaml.get("server", "localhost"))
+        self.port = int(subyaml.get("port", 9200))
+        self.secure = bool(subyaml.get("secure", False))
+        self.url_prefix = subyaml.get("url_prefix", "")
+        self.db_prefix = str(subyaml.get("db_prefix", "ponymail"))
+        self.max_hits = int(subyaml.get("max_hits", 5000))
+
+
+class Configuration:
+    server: ServerConfig
+    database: DBConfig
+    tasks: TaskConfig
+
+    def __init__(self, yml: dict):
+        self.server = ServerConfig(yml.get("server", {}))
+        self.database = DBConfig(yml.get("database", {}))
+        self.tasks = TaskConfig(yml.get("tasks", {}))
+
+
+class InterData:
+    """
+        A mix of various global variables used throughout processes
+    """
+
+    lists: dict
+    sessions: dict
+    activity: dict
+
+    def __init__(self):
+        self.lists = {}
+        self.sessions = {}
+        self.activity = {}
diff --git a/server/plugins/database.py b/server/plugins/database.py
new file mode 100644
index 0000000..0739ee7
--- /dev/null
+++ b/server/plugins/database.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This is the Database library stub for Pony Mail codename Foal
+"""
+
+import uuid
+
+import elasticsearch
+
+import plugins.configuration
+import plugins.defuzzer
+
+
+class DBNames:
+    def __init__(self, dbprefix):
+        self.mbox = f"{dbprefix}-mbox"
+        self.source = f"{dbprefix}-source"
+        self.attachment = f"{dbprefix}-attachment"
+        self.account = f"{dbprefix}-account"
+        self.session = f"{dbprefix}-session"
+        self.notification = f"{dbprefix}-notification"
+
+
+DBError = elasticsearch.ElasticsearchException
+
+
+class Database:
+    client: elasticsearch.AsyncElasticsearch
+    config: plugins.configuration.DBConfig
+    dbs: DBNames
+
+    def __init__(self, config: plugins.configuration.DBConfig):
+        self.config = config
+        self.uuid = str(uuid.uuid4())
+        self.dbs = DBNames(config.db_prefix)
+        self.client = elasticsearch.AsyncElasticsearch(
+            [
+                {
+                    "host": config.hostname,
+                    "port": config.port,
+                    "url_prefix": config.db_prefix,
+                    "use_ssl": config.secure,
+                },
+            ]
+        )
+
+    async def search(self, index="", **kwargs):
+        if not index:
+            index = self.dbs.mbox
+        res = await self.client.search(index=index, **kwargs)
+        return res
+
+    async def get(self, index="", **kwargs):
+        if not index:
+            index = self.dbs.mbox
+        res = await self.client.get(index=index, **kwargs)
+        return res
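
Note: each Database object wraps one AsyncElasticsearch client, and endpoints are
expected to borrow one through the session (see session.py later in this commit)
rather than opening their own connections. A hypothetical usage sketch with an
illustrative query:

    # Hypothetical helper inside a future endpoint; the query body is illustrative
    async def count_public_mail(session) -> int:
        res = await session.database.search(
            index=session.database.dbs.mbox,
            body={"query": {"match": {"private": False}}},
            size=0,
        )
        return res["hits"]["total"]["value"]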
diff --git a/server/plugins/formdata.py b/server/plugins/formdata.py
new file mode 100644
index 0000000..fe58840
--- /dev/null
+++ b/server/plugins/formdata.py
@@ -0,0 +1,65 @@
+import io
+import json
+import urllib.parse
+
+import aiohttp.web
+import multipart
+
+PYPONY_MAX_PAYLOAD = 256 * 1024
+
+
+async def parse_formdata(body_type, request: aiohttp.web.BaseRequest) -> dict:
+    indata = {}
+    for key, val in urllib.parse.parse_qsl(request.query_string):
+        indata[key] = val
+    # PUT/POST form data?
+    if request.method in ["PUT", "POST"]:
+        if request.can_read_body:
+            try:
+                if (
+                    request.content_length
+                    and request.content_length > PYPONY_MAX_PAYLOAD
+                ):
+                    raise ValueError("Form data payload too large, max 256kb allowed")
+                body = await request.text()
+                if body_type == "json":
+                    try:
+                        js = json.loads(body)
+                        assert isinstance(
+                            js, dict
+                        )  # JSON data MUST be a dictionary object, {...}
+                        indata.update(js)
+                    except ValueError:
+                        raise ValueError("Erroneous payload received")
+                elif body_type == "form":
+                    if (
+                        request.headers.get("content-type", "").lower()
+                        == "application/x-www-form-urlencoded"
+                    ):
+                        try:
+                            for key, val in urllib.parse.parse_qsl(body):
+                                indata[key] = val
+                        except ValueError:
+                            raise ValueError("Erroneous payload received")
+                    # If multipart, turn our body into a BytesIO object and use multipart on it
+                    elif (
+                        "multipart/form-data"
+                        in request.headers.get("content-type", "").lower()
+                    ):
+                        fh = request.headers.get("content-type")
+                        fb = fh.find("boundary=")
+                        if fb > 0:
+                            boundary = fh[fb + 9 :]
+                            if boundary:
+                                try:
+                                    for part in multipart.MultipartParser(
+                                        io.BytesIO(body.encode("utf-8")),
+                                        boundary,
+                                        len(body),
+                                    ):
+                                        indata[part.name] = part.value
+                                except ValueError:
+                                    raise ValueError("Erroneous payload received")
+            finally:
+                pass
+    return indata
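
Note: query string arguments and the request body end up merged into one dictionary.
For a hypothetical request such as POST /api/stats.json?list=dev&domain=ponymail.apache.org
with the JSON body {"d": "30"}, parse_formdata("json", request) would return
(parameter names here are purely illustrative):

    {"list": "dev", "domain": "ponymail.apache.org", "d": "30"}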
diff --git a/server/plugins/offloader.py b/server/plugins/offloader.py
new file mode 100644
index 0000000..4a6b95e
--- /dev/null
+++ b/server/plugins/offloader.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Offloading library for pushing heavy tasks to sub threads"""
+
+import asyncio
+import concurrent.futures
+
+DEBUG = False
+
+
+class ExecutorPool:
+    def __init__(self, threads=10):
+        self.threads = concurrent.futures.ProcessPoolExecutor(max_workers=threads)
+
+    async def run(self, func, *args, **kwargs):
+        if DEBUG:
+            print("[Runner] initiating runner")
+        runner = self.threads.submit(func, *args, **kwargs)
+        if DEBUG:
+            print("[Runner] Waiting for task %r to finish" % func)
+        while not runner.done():
+            await asyncio.sleep(0.01)
+        rv = runner.result()
+        if DEBUG:
+            print("[Runner] Done with task %r, put runner back in queue" % func)
+        if isinstance(rv, BaseException):
+            raise rv
+        return rv
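
Note: this mirrors how main.py offloads json.dumps for large responses so the event
loop stays responsive. A standalone usage sketch (assumes it is run from the server/
directory so the plugins package is importable; not part of this commit):

    import asyncio
    import json

    import plugins.offloader


    async def demo() -> str:
        pool = plugins.offloader.ExecutorPool(threads=2)
        # json.dumps runs in a worker while the event loop keeps polling
        return await pool.run(json.dumps, {"lists": 3, "hits": 1290}, indent=2)


    if __name__ == "__main__":
        print(asyncio.run(demo()))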
diff --git a/server/plugins/server.py b/server/plugins/server.py
new file mode 100644
index 0000000..ae2a775
--- /dev/null
+++ b/server/plugins/server.py
@@ -0,0 +1,25 @@
+import asyncio
+import typing
+
+import aiohttp.web
+from elasticsearch import AsyncElasticsearch
+
+import plugins.configuration
+
+
+class Endpoint:
+    exec: typing.Callable
+
+    def __init__(self, executor):
+        self.exec = executor
+
+
+class BaseServer:
+    """Main server class, base def"""
+
+    config: plugins.configuration.Configuration
+    server: aiohttp.web.Server
+    data: plugins.configuration.InterData
+    handlers: typing.Dict[str, Endpoint]
+    database: AsyncElasticsearch
+    dbpool: asyncio.Queue
diff --git a/server/plugins/session.py b/server/plugins/session.py
new file mode 100644
index 0000000..9f6c4d3
--- /dev/null
+++ b/server/plugins/session.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is the user session handler for PyPony"""
+
+import http.cookies
+import time
+import typing
+import uuid
+
+import aiohttp.web
+
+import plugins.database
+import plugins.server
+
+PYPONY_MAX_SESSION_AGE = 86400 * 7  # Max 1 week between visits before voiding a session
+
+
+class SessionCredentials:
+    uid: str
+    name: str
+    email: str
+    provider: str
+    authoritative: bool
+    admin: bool
+
+    def __init__(self, doc: typing.Dict = None):
+        if doc:
+            self.uid = doc.get("uid", "")
+            self.name = doc.get("name", "")
+            self.email = doc.get("email", "")
+            self.provider = doc.get("provider", "generic")
+            self.authoritative = doc.get("authoritative", False)
+            self.admin = doc.get("admin", False)
+        else:
+            self.uid = ""
+            self.name = ""
+            self.email = ""
+            self.provider = "generic"
+            self.authoritative = False
+            self.admin = False
+
+
+class SessionObject:
+    uid: str
+    created: int
+    last_accessed: int
+    credentials: SessionCredentials
+    database: typing.Optional[plugins.database.Database]
+
+    def __init__(self, server: plugins.server.BaseServer, doc=None):
+        self.database = None
+        if not doc:
+            now = int(time.time())
+            self.created = now
+            self.last_accessed = now
+            self.credentials = SessionCredentials()
+            self.uid = str(uuid.uuid4())
+        else:
+            self.created = doc["created"]
+            self.last_accessed = doc["last_accessed"]
+            self.credentials = SessionCredentials(doc["credentials"])
+            self.uid = doc["uid"]
+
+
+async def get_session(
+    server: plugins.server.BaseServer, request: aiohttp.web.BaseRequest
+    ) -> SessionObject:
+    session_id = None
+    session = None
+    if request.headers.get("cookie"):
+        for cookie_header in request.headers.getall("cookie"):
+            cookies: http.cookies.SimpleCookie = http.cookies.SimpleCookie(
+                cookie_header
+            )
+            if "ponymail" in cookies:
+                session_id = cookies["ponymail"].value
+                break
+
+    if session_id in server.data.sessions:
+        x_session = server.data.sessions[session_id]
+        now = int(time.time())
+        if (now - x_session.last_accessed) > PYPONY_MAX_SESSION_AGE:
+            del server.data.sessions[session_id]
+        else:
+            session = x_session
+    if not session:
+        session = SessionObject(server)
+    session.database = await server.dbpool.get()
+    return session
+
+
+async def set_session(server: plugins.server.BaseServer, **credentials):
+    """Create a new user session in the database"""
+    session_id = str(uuid.uuid4())
+    cookie: http.cookies.SimpleCookie = http.cookies.SimpleCookie()
+    cookie["ponymail"] = session_id
+    session = SessionObject(server)
+    session.credentials = SessionCredentials(credentials)
+    server.data.sessions[session_id] = session
+
+    return cookie.output()
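
Note: set_session() registers a new SessionObject under a fresh UUID and returns the
matching Set-Cookie line for the HTTP response. No endpoint uses it yet in this
commit; a future login handler might call it roughly like this (all credential
values are made up):

    import plugins.session


    async def issue_session(server) -> str:
        cookie_header = await plugins.session.set_session(
            server,
            uid="janedoe",
            name="Jane Doe",
            email="jane@example.org",
            provider="generic",
            authoritative=False,
            admin=False,
        )
        # e.g. "Set-Cookie: ponymail=<uuid4>", to be attached to the response headers
        return cookie_header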
diff --git a/server/ponymail.yaml.example b/server/ponymail.yaml.example
new file mode 100644
index 0000000..b0e06cc
--- /dev/null
+++ b/server/ponymail.yaml.example
@@ -0,0 +1,15 @@
+server:
+  port: 8080             # Port to bind to
+  bind: 127.0.0.1        # IP to bind to - typically 127.0.0.1 for localhost or 0.0.0.0 for all IPs
+
+
+database:
+  server: localhost      # The hostname of the Elasticsearch database
+  port: 9200             # ES Port
+  secure: false          # Whether TLS is enabled on ES
+  url_prefix: ~          # URL prefix, if proxying to ES
+  db_prefix: ponymail    # DB prefix, usually 'ponymail'
+  max_hits: 15000        # Maximum number of emails to process in a search
+
+tasks:
+  refresh_rate:  150     # Background indexer run interval, in seconds
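
Note: main.py loads a file like the one above with yaml.safe_load() and wraps it in
plugins.configuration.Configuration. A quick sketch of the resulting mapping (run
from the server/ directory, using the example file name):

    import yaml

    import plugins.configuration

    yml = yaml.safe_load(open("ponymail.yaml.example"))
    config = plugins.configuration.Configuration(yml)
    print(config.server.ip, config.server.port)                   # 127.0.0.1 8080
    print(config.database.hostname, config.database.db_prefix)    # localhost ponymail
    print(config.tasks.refresh_rate)                              # 150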