Posted to commits@ponymail.apache.org by se...@apache.org on 2021/12/13 17:38:31 UTC

[incubator-ponymail-foal] branch master updated (a1f8627 -> 1cb9b42)

This is an automated email from the ASF dual-hosted git repository.

sebb pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-ponymail-foal.git.


    from a1f8627  Tweak tab design
     new 475e3a7  Rename to agree with elastic/Elastic class
     new 1cb9b42  Update version

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 server/endpoints/email.py    |  2 +-
 server/endpoints/gravatar.py |  2 +-
 server/endpoints/mgmt.py     | 18 +++++++++---------
 server/plugins/auditlog.py   |  6 +++---
 server/plugins/background.py | 20 ++++++++++----------
 server/plugins/database.py   | 24 ++++++++++++------------
 server/plugins/messages.py   | 10 +++++-----
 server/plugins/session.py    | 12 ++++++------
 server/server_version.py     |  2 +-
 9 files changed, 48 insertions(+), 48 deletions(-)

[incubator-ponymail-foal] 02/02: Update version

Posted by se...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sebb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-ponymail-foal.git

commit 1cb9b42c7b9a32ed2a7087113181320f4c3d1002
Author: Sebb <se...@apache.org>
AuthorDate: Mon Dec 13 17:37:28 2021 +0000

    Update version
---
 server/server_version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/server_version.py b/server/server_version.py
index a6ba228..53f7747 100644
--- a/server/server_version.py
+++ b/server/server_version.py
@@ -1,2 +1,2 @@
 # This file is generated by server/update_version.sh
-PONYMAIL_SERVER_VERSION = 'f1dce71'
+PONYMAIL_SERVER_VERSION = 'a953120'
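
For context, server_version.py is regenerated by server/update_version.sh, a shell script that is not part of this change. A minimal Python sketch of the same idea, assuming the version string is simply the short hash of the current git HEAD (which matches the format of the values above):

# Hypothetical stand-in for server/update_version.sh (the real script is a
# shell script and is not shown in this diff). It rewrites server_version.py
# with the short hash of the current git HEAD.
import subprocess
from pathlib import Path

def update_version(repo_root: Path = Path(".")) -> str:
    short_hash = subprocess.check_output(
        ["git", "rev-parse", "--short", "HEAD"], cwd=repo_root, text=True
    ).strip()
    version_file = repo_root / "server" / "server_version.py"
    version_file.write_text(
        "# This file is generated by server/update_version.sh\n"
        f"PONYMAIL_SERVER_VERSION = '{short_hash}'\n"
    )
    return short_hash

if __name__ == "__main__":
    print(update_version())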

[incubator-ponymail-foal] 01/02: Rename to agree with elastic/Elastic class

Posted by se...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sebb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-ponymail-foal.git

commit 475e3a7adfb6f1812111cef42452b8c60719e644
Author: Sebb <se...@apache.org>
AuthorDate: Mon Dec 13 17:37:14 2021 +0000

    Rename to agree with elastic/Elastic class
    
    Also, the prefix makes it easier to distinguish db index references
---
 server/endpoints/email.py    |  2 +-
 server/endpoints/gravatar.py |  2 +-
 server/endpoints/mgmt.py     | 18 +++++++++---------
 server/plugins/auditlog.py   |  6 +++---
 server/plugins/background.py | 20 ++++++++++----------
 server/plugins/database.py   | 24 ++++++++++++------------
 server/plugins/messages.py   | 10 +++++-----
 server/plugins/session.py    | 12 ++++++------
 8 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/server/endpoints/email.py b/server/endpoints/email.py
index 1ff6fdd..22f2343 100644
--- a/server/endpoints/email.py
+++ b/server/endpoints/email.py
@@ -67,7 +67,7 @@ async def process(
                         try:
                             assert session.database, "Database not connected!"
                             attachment = await session.database.get(
-                                index=session.database.dbs.attachment, id=indata.get("file")
+                                index=session.database.dbs.db_attachment, id=indata.get("file")
                             )
                             if attachment:
                                 blob = base64.decodebytes(attachment["_source"].get("source").encode("utf-8"))
diff --git a/server/endpoints/gravatar.py b/server/endpoints/gravatar.py
index 6602596..c0b1cf9 100644
--- a/server/endpoints/gravatar.py
+++ b/server/endpoints/gravatar.py
@@ -44,7 +44,7 @@ async def fetch_gravatar(gid):
 
 async def gravatar_exists_in_db(session, gid):
     res = await session.database.search(
-        index=session.database.dbs.mbox,
+        index=session.database.dbs.db_mbox,
         size=1,
         body={"query": {"bool": {"must": [{"term": {"gravatar": gid}}]}}},
     )
diff --git a/server/endpoints/mgmt.py b/server/endpoints/mgmt.py
index 6ac0d67..28335ea 100644
--- a/server/endpoints/mgmt.py
+++ b/server/endpoints/mgmt.py
@@ -58,14 +58,14 @@ async def process(
                 email["deleted"] = True
                 if server.config.ui.fully_delete and email["id"] and email["dbid"]:  # Full on GDPR blast?
                     await session.database.delete(
-                        index=session.database.dbs.mbox, id=email["id"],
+                        index=session.database.dbs.db_mbox, id=email["id"],
                     )
                     await session.database.delete(
-                        index=session.database.dbs.source, id=email["dbid"],
+                        index=session.database.dbs.db_source, id=email["dbid"],
                     )
                 else:  # Standard behavior: hide the email from everyone.
                     await session.database.update(
-                        index=session.database.dbs.mbox, body={"doc": email}, id=email["id"],
+                        index=session.database.dbs.db_mbox, body={"doc": email}, id=email["id"],
                     )
                 lid = email.get("list_raw", "??")
                 await plugins.auditlog.add_entry(session, action="delete", target=doc, lid=lid, log=f"Removed email {doc} from {lid} archives")
@@ -80,7 +80,7 @@ async def process(
             if email and isinstance(email, dict) and plugins.aaa.can_access_email(session, email):
                 email["deleted"] = True
                 await session.database.update(
-                    index=session.database.dbs.mbox, body={"doc": email}, id=email["id"],
+                    index=session.database.dbs.db_mbox, body={"doc": email}, id=email["id"],
                 )
                 lid = email.get("list_raw", "??")
                 await plugins.auditlog.add_entry(session, action="hide", target=doc, lid=lid, log=f"Hid email {doc} from {lid} archives")
@@ -95,7 +95,7 @@ async def process(
             if email and isinstance(email, dict) and plugins.aaa.can_access_email(session, email):
                 email["deleted"] = False
                 await session.database.update(
-                    index=session.database.dbs.mbox, body={"doc": email}, id=email["id"],
+                    index=session.database.dbs.db_mbox, body={"doc": email}, id=email["id"],
                 )
                 lid = email.get("list_raw", "??")
                 await plugins.auditlog.add_entry(session, action="unhide", target=doc, lid=lid, log=f"Unhid email {doc} from {lid} archives")
@@ -110,14 +110,14 @@ async def process(
             try:
                 assert session.database, "Database not connected!"
                 attachment = await session.database.get(
-                    index=session.database.dbs.attachment, id=doc
+                    index=session.database.dbs.db_attachment, id=doc
                 )
             except plugins.database.DBError:
                 pass  # attachment not found
 
             if attachment and isinstance(attachment, dict):
                 await session.database.delete(
-                    index=session.database.dbs.attachment, id=attachment["_id"],
+                    index=session.database.dbs.db_attachment, id=attachment["_id"],
                 )
                 lid = "<system>"
                 await plugins.auditlog.add_entry(session, action="delatt", target=doc, lid=lid, log=f"Removed attachment {doc} from the archives")
@@ -164,7 +164,7 @@ async def process(
 
             # Save edited email
             await session.database.update(
-                index=session.database.dbs.mbox, body={"doc": email}, id=email["id"],
+                index=session.database.dbs.db_mbox, body={"doc": email}, id=email["id"],
             )
 
             # Fetch source, mark as deleted (modified) and save IF anything but just privacy changed
@@ -176,7 +176,7 @@ async def process(
                     source = source["_source"]
                     source["deleted"] = True
                     await session.database.update(
-                        index=session.database.dbs.source, body={"doc": source}, id=docid,
+                        index=session.database.dbs.db_source, body={"doc": source}, id=docid,
                     )
 
             await plugins.auditlog.add_entry(session, action="edit", target=doc, lid=lid,
diff --git a/server/plugins/auditlog.py b/server/plugins/auditlog.py
index ad94d16..955ba82 100644
--- a/server/plugins/auditlog.py
+++ b/server/plugins/auditlog.py
@@ -50,11 +50,11 @@ async def view(
     assert session.database, "No database connection could be found!"
     if not filter:
         res = await session.database.search(
-            index=session.database.dbs.auditlog, size=num_entries, from_=page * num_entries, sort="date:desc",
+            index=session.database.dbs.db_auditlog, size=num_entries, from_=page * num_entries, sort="date:desc",
         )
     else:
         res = await session.database.search(
-            index=session.database.dbs.auditlog, size=num_entries, from_=page * num_entries, sort="date:desc",
+            index=session.database.dbs.db_auditlog, size=num_entries, from_=page * num_entries, sort="date:desc",
             body={
                 "query": {"bool": {"must": [{"terms": {"action": filter}}]}}
             },
@@ -79,7 +79,7 @@ async def add_entry(session: plugins.session.SessionObject, action: str, target:
     assert session.credentials, "No session credentials could be found!"
     assert session.database, "Session not connected to database!"
     await session.database.index(
-        index=session.database.dbs.auditlog,
+        index=session.database.dbs.db_auditlog,
         body={
             "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())),
             "action": action,
diff --git a/server/plugins/background.py b/server/plugins/background.py
index 34296a3..72c626d 100644
--- a/server/plugins/background.py
+++ b/server/plugins/background.py
@@ -63,13 +63,13 @@ async def get_lists(database: plugins.configuration.DBConfig) -> dict:
 
     # Fetch aggregations of all private emails
     # Do this first, so mixed lists are not marked private
-    s = Search(using=db.client, index=db.dbs.mbox).filter(
+    s = Search(using=db.client, index=db.dbs.db_mbox).filter(
         "term", private=True
     )
     s.aggs.bucket("per_list", "terms", field="list_raw", size=limit)
 
     res = await db.search(
-        index=db.dbs.mbox, body=s.to_dict(), size=0
+        index=db.dbs.db_mbox, body=s.to_dict(), size=0
     )
 
     for ml in res["aggregations"]["per_list"]["buckets"]:
@@ -80,13 +80,13 @@ async def get_lists(database: plugins.configuration.DBConfig) -> dict:
         }
 
     # Fetch aggregations of all public emails
-    s = Search(using=db.client, index=db.dbs.mbox).filter(
+    s = Search(using=db.client, index=db.dbs.db_mbox).filter(
         "term", private=False
     )
     s.aggs.bucket("per_list", "terms", field="list_raw", size=limit)
 
     res = await db.search(
-        index=db.dbs.mbox, body=s.to_dict(), size=0
+        index=db.dbs.db_mbox, body=s.to_dict(), size=0
     )
 
     for ml in res["aggregations"]["per_list"]["buckets"]:
@@ -97,12 +97,12 @@ async def get_lists(database: plugins.configuration.DBConfig) -> dict:
         }
 
     # Get 90 day activity, if any
-    s = Search(using=db.client, index=db.dbs.mbox)
+    s = Search(using=db.client, index=db.dbs.db_mbox)
     s = s.filter('range', date = {'gte': ACTIVITY_TIMESPAN})
     s.aggs.bucket("per_list", "terms", field="list_raw", size=limit)
 
     res = await db.search(
-        index=db.dbs.mbox, body=s.to_dict(), size=0
+        index=db.dbs.db_mbox, body=s.to_dict(), size=0
     )
 
     for ml in res["aggregations"]["per_list"]["buckets"]:
@@ -125,7 +125,7 @@ async def get_public_activity(database: plugins.configuration.DBConfig) -> dict:
 
     # Fetch aggregations of all public emails
     s = (
-        Search(using=db, index=db.dbs.mbox)
+        Search(using=db, index=db.dbs.db_mbox)
         .query("match", private=False)
         .filter("range", date={"lt": "now+1d", "gt": "now-14d"})
     )
@@ -137,7 +137,7 @@ async def get_public_activity(database: plugins.configuration.DBConfig) -> dict:
     )
 
     res = await db.search(
-        index=db.dbs.mbox, body=s.to_dict(), size=0
+        index=db.dbs.db_mbox, body=s.to_dict(), size=0
     )
 
     no_emails = res["hits"]["total"]["value"]
@@ -153,12 +153,12 @@ async def get_public_activity(database: plugins.configuration.DBConfig) -> dict:
     thread_count = 0
 
     s = (
-        Search(using=db.client, index=db.dbs.mbox)
+        Search(using=db.client, index=db.dbs.db_mbox)
         .query("match", private=False)
         .filter("range", date={"lt": "now+1d", "gt": "now-14d"})
     )
     async for docs in db.scan(
-        index=db.dbs.mbox,
+        index=db.dbs.db_mbox,
         query=s.to_dict(),
         _source_includes=[
             "message-id",
diff --git a/server/plugins/database.py b/server/plugins/database.py
index cf1f436..eccb18b 100644
--- a/server/plugins/database.py
+++ b/server/plugins/database.py
@@ -33,13 +33,13 @@ class Timeout (elasticsearch.exceptions.ConnectionTimeout):
 
 class DBNames:
     def __init__(self, dbprefix):
-        self.mbox = f"{dbprefix}-mbox"
-        self.source = f"{dbprefix}-source"
-        self.attachment = f"{dbprefix}-attachment"
-        self.account = f"{dbprefix}-account"
-        self.session = f"{dbprefix}-session"
-        self.notification = f"{dbprefix}-notification"
-        self.auditlog = f"{dbprefix}-auditlog"
+        self.db_mbox = f"{dbprefix}-mbox"
+        self.db_source = f"{dbprefix}-source"
+        self.db_attachment = f"{dbprefix}-attachment"
+        self.db_account = f"{dbprefix}-account"
+        self.db_session = f"{dbprefix}-session"
+        self.db_notification = f"{dbprefix}-notification"
+        self.db_auditlog = f"{dbprefix}-auditlog"
 
 
 DBError = elasticsearch.ElasticsearchException
@@ -71,7 +71,7 @@ class Database:
 
     async def search(self, index="", **kwargs):
         if not index:
-            index = self.dbs.mbox
+            index = self.dbs.db_mbox
         try:
             res = await self.client.search(index=index, **kwargs)
             return res
@@ -80,19 +80,19 @@ class Database:
 
     async def get(self, index="", **kwargs):
         if not index:
-            index = self.dbs.mbox
+            index = self.dbs.db_mbox
         res = await self.client.get(index=index, **kwargs)
         return res
 
     async def delete(self, index="", **kwargs):
         if not index:
-            index = self.dbs.session
+            index = self.dbs.db_session
         res = await self.client.delete(index=index, **kwargs)
         return res
 
     async def index(self, index="", **kwargs):
         if not index:
-            index = self.dbs.session
+            index = self.dbs.db_session
         res = await self.client.index(index=index, **kwargs)
         return res
 
@@ -108,7 +108,7 @@ class Database:
 
     async def update(self, index="", **kwargs):
         if not index:
-            index = self.dbs.session
+            index = self.dbs.db_session
         res = await self.client.update(index=index, **kwargs)
         return res
 
diff --git a/server/plugins/messages.py b/server/plugins/messages.py
index 9c63458..42cb948 100644
--- a/server/plugins/messages.py
+++ b/server/plugins/messages.py
@@ -216,9 +216,9 @@ async def get_email(
     source=False,
 ):
     assert session.database, DATABASE_NOT_CONNECTED
-    doctype = session.database.dbs.mbox
+    doctype = session.database.dbs.db_mbox
     if source:
-        doctype = session.database.dbs.source
+        doctype = session.database.dbs.db_source
     # Older indexes may need a match instead of a strict terms agg in order to find
     # emails in DBs that may have been incorrectly analyzed.
     aggtype = "match"
@@ -287,7 +287,7 @@ async def get_email(
 
 async def get_source(session: plugins.session.SessionObject, permalink: str = None, raw=False):
     assert session.database, DATABASE_NOT_CONNECTED
-    doctype = session.database.dbs.source
+    doctype = session.database.dbs.db_source
     try:
         doc = await session.database.get(index=doctype, id=permalink)
     except plugins.database.DBError:
@@ -457,7 +457,7 @@ async def get_activity_span(session, query_defuzzed):
     fuzz_private_only = dict(query_defuzzed)
     fuzz_private_only["filter"] = [{"term": {"private": True}}]
     res = await session.database.search(
-        index=session.database.dbs.mbox,
+        index=session.database.dbs.db_mbox,
         size=0,
         body={
             "query": {"bool": fuzz_private_only},
@@ -486,7 +486,7 @@ async def get_activity_span(session, query_defuzzed):
 
     # Get oldest and youngest doc in single scan, as well as a monthly histogram
     res = await session.database.search(
-        index=session.database.dbs.mbox,
+        index=session.database.dbs.db_mbox,
         size=0,
         body={"query": {"bool": query_defuzzed},
             "aggs": {
diff --git a/server/plugins/session.py b/server/plugins/session.py
index 4b2cbc2..ed44730 100644
--- a/server/plugins/session.py
+++ b/server/plugins/session.py
@@ -143,14 +143,14 @@ async def get_session(
     if session_id and session.database:
         try:
             session_doc = await session.database.get(
-                session.database.dbs.session, id=session_id
+                session.database.dbs.db_session, id=session_id
             )
             last_update = session_doc["_source"]["updated"]
             session.cookie = session_id
             # Check that this cookie ain't too old. If it is, delete it and return bare-bones session object
             if (now - last_update) > FOAL_MAX_SESSION_AGE:
                 session.database.delete(
-                    index=session.database.dbs.session, id=session_id
+                    index=session.database.dbs.db_session, id=session_id
                 )
                 return session
 
@@ -158,7 +158,7 @@ async def get_session(
             cid = session_doc["_source"]["cid"]
             if cid:
                 account_doc = await session.database.get(
-                    session.database.dbs.account, id=cid
+                    session.database.dbs.db_account, id=cid
                 )
                 creds = account_doc["_source"]["credentials"]
                 internal = account_doc["_source"]["internal"]
@@ -212,7 +212,7 @@ async def save_session(session: SessionObject):
     """Save a session object in the ES database"""
     assert session.database, DATABASE_NOT_CONNECTED
     await session.database.index(
-        index=session.database.dbs.session,
+        index=session.database.dbs.db_session,
         id=session.cookie,
         body={
             "cookie": session.cookie,
@@ -225,7 +225,7 @@ async def save_session(session: SessionObject):
 async def remove_session(session: SessionObject):
     """Remove a session object in the ES database"""
     assert session.database, DATABASE_NOT_CONNECTED
-    await session.database.delete(index=session.database.dbs.session, id=session.cookie)
+    await session.database.delete(index=session.database.dbs.db_session, id=session.cookie)
 
 
 async def save_credentials(session: SessionObject):
@@ -233,7 +233,7 @@ async def save_credentials(session: SessionObject):
     assert session.database, DATABASE_NOT_CONNECTED
     assert session.credentials, "Session object without credentials, cannot save!"
     await session.database.index(
-        index=session.database.dbs.account,
+        index=session.database.dbs.db_account,
         id=session.cid,
         body={
             "cid": session.cid,