Compare commits

...

6 Commits
jfw ... master

Author SHA1 Message Date
Simon Zeyer
f64879398f Add sleep delay in run function to prevent tight loop execution 2025-06-16 21:22:55 +00:00
Simon Zeyer
3e33de4606 Fix flag setting method in IMAP message handling 2025-06-09 12:01:03 +00:00
Simon Zeyer
8b4bc74b1d Add PDF generation and printing functionality using Google Chrome 2025-06-09 11:52:44 +00:00
Simon Zeyer
2b331fbd1f Remove redundant idle check logic for existing messages in IMAP connection 2025-06-09 11:37:44 +00:00
Simon Zeyer
b528b0a748 Refactor idle check logic in IMAP connection to improve message handling 2025-06-09 11:12:18 +00:00
Simon Zeyer
5127caed03 Add IMAP support and refactor environment configuration
- Introduced `imap_connect.py` for handling IMAP email interactions.
- Created `run.py` to manage execution based on the selected mode (IMAP or EWS).
- Updated Dockerfile and .env.dev to include necessary environment variables.
- Enhanced logging and email processing in `exchange_connect.py`.
- Added `weasyprint` and `imapclient` to requirements.
2025-06-09 10:53:08 +00:00
8 changed files with 275 additions and 75 deletions

.env.dev
View File

@@ -1,3 +1,4 @@
mode="EWS"
username=""
password=""
server=""

Dockerfile
View File

@@ -1,5 +1,6 @@
FROM python:3.10-bullseye
ENV mode="EWS"
ENV username=
ENV password=
ENV server="exchange.sankt-wendel.de"
@@ -54,6 +55,24 @@ RUN useradd \
print \
&& sed -i '/%sudo[[:space:]]/ s/ALL[[:space:]]*$/NOPASSWD:ALL/' /etc/sudoers
# Print PDF
RUN apt-get update && apt-get install -y \
apt-transport-https \
ca-certificates \
curl \
gnupg \
--no-install-recommends \
&& curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& echo "deb [arch=amd64] https://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google-chrome.list \
&& apt-get update && apt-get install -y \
google-chrome-stable \
--no-install-recommends
# It won't run from the root user.
RUN groupadd chrome && useradd -g chrome -s /bin/bash -G audio,video chrome \
&& mkdir -p /home/chrome && chown -R chrome:chrome /home/chrome
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
@@ -61,4 +80,4 @@ COPY ./app .
COPY *.crt /usr/local/share/ca-certificates/
RUN update-ca-certificates
CMD [ "sh","-c","/etc/init.d/cups start && python3 /usr/src/app/exchange_connect.py" ]
CMD [ "sh","-c","/etc/init.d/cups start && python3 /usr/src/app/run.py" ]
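A quick way to verify the Chrome install inside this image is a headless render of a blank page, using the core flags the new generate_pdf helper passes. This is a hypothetical smoke test, not part of the diff; the binary path matches the google-chrome-stable package installed above.
import subprocess

# Render about:blank to PDF with the flags used by hooks.py; raises if Chrome fails.
subprocess.run(
    ["/usr/bin/google-chrome-stable", "--headless", "--no-sandbox",
     "--disable-gpu", "--print-to-pdf=/tmp/smoke.pdf", "about:blank"],
    check=True,
)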

app/exchange_connect.py
View File

@@ -21,65 +21,65 @@ Message.register("alarmfax_parser_verarbeitet", alarmfax_parser_verarbeitet)
Message.register("alarmfax_parser_id", alarmfax_parser_id)
#print([f.name for f in Message.FIELDS if f.is_searchable])
threads = {}
def run():
threads = {}
format = "%(asctime)s|%(threadName)s: %(message)s"
logging.basicConfig(format=format, level=logging.INFO,
datefmt="%Y-%m-%d %H:%M:%S")
format = "%(asctime)s|%(threadName)s: %(message)s"
logging.basicConfig(format=format, level=logging.INFO,
datefmt="%Y-%m-%d %H:%M:%S")
def eventHandler(ELEMENT_NAME, item_id, item_changekey):
if (ELEMENT_NAME == 'ModifiedEvent' and IS_DEV) or ELEMENT_NAME == 'NewMailEvent' or ELEMENT_NAME == 'SearchFolderEvent':
logging.info(ELEMENT_NAME + " - get Mail")
m: Message = a.inbox.get(id=item_id, changekey=item_changekey)
if m.alarmfax_parser_verarbeitet and parser_id in ("" if m.alarmfax_parser_id == None else m.alarmfax_parser_id):
logging.info("Mail {} bereits verarbeitet.. ignoriere".format(m.id))
if not IS_DEV:
return
else:
m.alarmfax_parser_verarbeitet = True
m.alarmfax_parser_id = ("" if m.alarmfax_parser_id == None else m.alarmfax_parser_id) + parser_id
m.save(update_fields=["alarmfax_parser_verarbeitet","alarmfax_parser_id"])
logging.info("got Mail {} von {}".format(m.subject, m.sender.email_address))
if m.sender.email_address in filter_from:
parsed_body = parse_securecad_message(m.body)
logging.debug(parsed_body)
if parsed_body != None:
if 'ALARMDEPESCHE' in parsed_body:
logging.info("Alarm für: {}".format(parsed_body['ALARMDEPESCHE']))
webhook(parsed_body)
alarminator_api(parsed_body)
cups_print(parsed_body,m.body)
pass
def eventHandler(ELEMENT_NAME, item_id, item_changekey):
if (ELEMENT_NAME == 'ModifiedEvent' and IS_DEV) or ELEMENT_NAME == 'NewMailEvent' or ELEMENT_NAME == 'SearchFolderEvent':
logging.info(ELEMENT_NAME + " - get Mail")
m: Message = a.inbox.get(id=item_id, changekey=item_changekey)
if m.alarmfax_parser_verarbeitet and parser_id in ("" if m.alarmfax_parser_id == None else m.alarmfax_parser_id):
logging.info("Mail {} bereits verarbeitet.. ignoriere".format(m.id))
if not IS_DEV:
return
else:
m.alarmfax_parser_verarbeitet = True
m.alarmfax_parser_id = ("" if m.alarmfax_parser_id == None else m.alarmfax_parser_id) + parser_id
m.save(update_fields=["alarmfax_parser_verarbeitet","alarmfax_parser_id"])
logging.info("got Mail {} von {}".format(m.subject, m.sender.email_address))
if m.sender.email_address in filter_from:
parsed_body = parse_securecad_message(m.body)
logging.debug(parsed_body)
if parsed_body != None:
if 'ALARMDEPESCHE' in parsed_body:
logging.info("Alarm für: {}".format(parsed_body['ALARMDEPESCHE']))
webhook(parsed_body)
alarminator_api(parsed_body)
cups_print(parsed_body,m.body)
pass
def folder_event_subscriber(folder: Folder):
logging.info('folder_event_subscriber startet for Folder: {}'.format(folder.name))
while True:
# filtern des ordners nach mails der letzten 24h, die nicht verarbeitet wurden
now = datetime.datetime.now(a.default_timezone)
folder.all()
folder.all()
filtered_items = folder.filter(
datetime_received__range=(now - datetime.timedelta(days=1), now + datetime.timedelta(days=1))
).exclude(
alarmfax_parser_verarbeitet=True,
alarmfax_parser_id__contains=parser_id
)
cnt = filtered_items.count()
if cnt > 0:
logging.info("{} Mails nicht verarbeitet in den letzten 2 Tagen in ordner: {}".format(cnt, folder.name))
filtered_items = filtered_items.values("id", "changekey")
for m in filtered_items:
t = Thread(target=eventHandler, args=('SearchFolderEvent',m["id"],m["changekey"],),name="eventHandler: SearchFolderEvent ({})".format(m["id"]))
t.start()
# aktives warten auf streaming_events. maximal eine minute lang, dann wird nochmal der ordner durchsucht, falls mails angekommen sind während eines timeout/cooldown.
subscription_id = folder.subscribe_to_streaming()
for notification in folder.get_streaming_events(subscription_id, connection_timeout=1):
for event in notification.events:
if event.item_id != None:
t = Thread(target=eventHandler, args=(event.ELEMENT_NAME,event.item_id.id,event.item_id.changekey,),name="eventHandler: {} ({})".format(event.ELEMENT_NAME, event.item_id.id))
def folder_event_subscriber(folder: Folder):
logging.info('folder_event_subscriber startet for Folder: {}'.format(folder.name))
while True:
# filtern des ordners nach mails der letzten 24h, die nicht verarbeitet wurden
now = datetime.datetime.now(a.default_timezone)
folder.all()
folder.all()
filtered_items = folder.filter(
datetime_received__range=(now - datetime.timedelta(days=1), now + datetime.timedelta(days=1))
).exclude(
alarmfax_parser_verarbeitet=True,
alarmfax_parser_id__contains=parser_id
)
cnt = filtered_items.count()
if cnt > 0:
logging.info("{} Mails nicht verarbeitet in den letzten 2 Tagen in ordner: {}".format(cnt, folder.name))
filtered_items = filtered_items.values("id", "changekey")
for m in filtered_items:
t = Thread(target=eventHandler, args=('SearchFolderEvent',m["id"],m["changekey"],),name="eventHandler: SearchFolderEvent ({})".format(m["id"]))
t.start()
# aktives warten auf streaming_events. maximal eine minute lang, dann wird nochmal der ordner durchsucht, falls mails angekommen sind während eines timeout/cooldown.
subscription_id = folder.subscribe_to_streaming()
for notification in folder.get_streaming_events(subscription_id, connection_timeout=1):
for event in notification.events:
if event.item_id != None:
t = Thread(target=eventHandler, args=(event.ELEMENT_NAME,event.item_id.id,event.item_id.changekey,),name="eventHandler: {} ({})".format(event.ELEMENT_NAME, event.item_id.id))
t.start()
if __name__ == "__main__":
try:
username = os.environ.get('username')
password = os.environ.get('password')
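The hunk above reads and writes two custom Exchange properties, alarmfax_parser_verarbeitet and alarmfax_parser_id, whose class definitions are outside this diff. A minimal sketch of how such properties are typically declared with exchangelib; the GUID and the Boolean type are placeholders, not the project's actual values.
from exchangelib import ExtendedProperty, Message

class AlarmfaxParserVerarbeitet(ExtendedProperty):
    # Placeholder property set; the real definition lives elsewhere in the repository.
    property_set_id = "00000000-0000-0000-0000-000000000000"
    property_name = "alarmfax_parser_verarbeitet"
    property_type = "Boolean"

# Registering the property exposes it as a regular Message attribute, which is
# what allows m.alarmfax_parser_verarbeitet and save(update_fields=...) above.
Message.register("alarmfax_parser_verarbeitet", AlarmfaxParserVerarbeitet)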

app/hooks.py
View File

@@ -5,6 +5,7 @@ import cups
from weasyprint import HTML
from requests.adapters import Retry
import uuid
import subprocess
retries = Retry(total=5,
backoff_factor=0.1,
@@ -26,18 +27,18 @@ def alarminator_api(parsed_body: dict):
alarminator_api = os.environ.get('alarminator_api') if os.environ.get('alarminator_api') else ""
alarminator_token = os.environ.get('alarminator_token') if os.environ.get('alarminator_token') else ""
alarminator_zvies_use_PEALGRP = True if os.environ.get('alarminator_zvies_use_PEALGRP') == 'True' else False
maps_api_key = os.environ.get('MAPS_API_KEY',"")
if alarminator_api != "" and alarminator_token != "":
if 'ALARMDEPESCHE' in parsed_body: # sendAlarm triggern
logging.info("GET zu {}/operations/sendAlarm".format(alarminator_api))
s = requests.Session()
s.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))
#&object=Kirmesplatz &district=Oberlinxweilerstrasse &subject=THK (TH klein &street=Oberlinxweilerstrasse &ils=\"secur.CAD\" <leitstelle@zrf-saar.de>&connector=mailParser&token=ea2110e1-11b9-421f-a53d-96cc0fc82c31
req_string = ""
req_string +="?token={}".format(alarminator_token)
if 'Einsatzbeginn(Soll)' in parsed_body:
req_string +="&alarmdate={}".format(parsed_body['Einsatzbeginn(Soll)'].split(" ")[0])
req_string +="&alarmtime={}".format(parsed_body['Einsatzbeginn(Soll)'].split(" ")[1])
if 'Auftragsnummer' in parsed_body:
if 'Auftragsnummer' in parsed_body and not (os.environ.get('IS_DEV') and os.environ.get('IS_DEV') == "True"):
req_string +="&operationnumber={}".format(parsed_body['Auftragsnummer'])
if 'Sachverhalt' in parsed_body:
req_string +="&message={}".format(parsed_body['Sachverhalt'])
@@ -48,21 +49,29 @@ def alarminator_api(parsed_body: dict):
req_string +="&location={}".format(parsed_body['Einsatzziel']['PLZ / Ort'])
if 'Objekt' in parsed_body['Einsatzziel']:
req_string +="&object={}".format(parsed_body['Einsatzziel']['Objekt'])
street = []
if 'Strasse' in parsed_body['Einsatzziel']:
req_string +="&street={}".format(parsed_body['Einsatzziel']['Strasse']) + (("\n"+parsed_body['Einsatzziel']['Info ']) if 'Info ' in parsed_body['Einsatzziel'] else "" )
street.append(parsed_body['Einsatzziel']['Strasse'])
if 'Zusatz Strasse' in parsed_body['Einsatzziel']:
street.append(parsed_body['Einsatzziel']['Zusatz Strasse'])
if 'Strasse / Hs.-Nr.' in parsed_body['Einsatzziel']:
req_string +="&street={}".format(parsed_body['Einsatzziel']['Strasse / Hs.-Nr.'] + (("\n"+parsed_body['Einsatzziel']['Info ']) if 'Info ' in parsed_body['Einsatzziel'] else "" ))
street.append(parsed_body['Einsatzziel']['Strasse / Hs.-Nr.'])
if 'Info' in parsed_body['Einsatzziel']:
street.append(parsed_body['Einsatzziel']['Info'])
if street.__len__() > 0:
req_string +="&street={}".format("\n".join(street))
if 'Einsatzmittelliste' in parsed_body:
gear = []
for r in parsed_body['Einsatzmittelliste']:
if r['Typ'] != 'PEALGRP':
#if r['Typ'] != 'PEALGRP':
if r['Ressourcen'] not in gear:
gear.append(r['Ressourcen'])
req_string +="&gear={}".format(';'.join(gear))
#req_string +="&district={}".format('district')
#req_string +="&floor={}".format('floor')
#req_string +="&section={}".format('section')
req_string +="&district={}".format('district')
req_string +="&floor={}".format('floor')
req_string +="&section={}".format('section')
req_string +="&keywordRaw={}".format(parsed_body['Einsatzstichwort'])
#req_string +="&keywordId={}".format('keywordId')
req_string +="&keywordCategory={}".format(parsed_body['Einsatzstichwort'].split("(")[0])
@@ -76,9 +85,30 @@ def alarminator_api(parsed_body: dict):
req_string +="&zveis={}".format(';'.join(zveis))
else:
req_string +="&zveis={}".format(parsed_body['ALARMDEPESCHE'])
if maps_api_key != "":
try:
maps_address_param = []
if 'Objekt' in parsed_body['Einsatzziel']:
maps_address_param.append("{}".format(parsed_body['Einsatzziel']['Objekt']))
if street.__len__() > 0:
maps_address_param.append('{}'.format(",".join(street)))
if 'Stadt' in parsed_body['Einsatzziel']:
maps_address_param.append("{}".format(parsed_body['Einsatzziel']['Stadt']))
if 'PLZ / Ort' in parsed_body['Einsatzziel']:
maps_address_param.append("{}".format(parsed_body['Einsatzziel']['PLZ / Ort']))
maps_request = requests.get('https://maps.google.com/maps/api/geocode/json?address={}&key={}'.format(','.join(maps_address_param),maps_api_key))
if maps_request.json()['results'].__len__() == 1:
req_string +="&lat={}".format(maps_request.json()['results'][0]['geometry']['location']['lat'])
req_string +="&lon={}".format(maps_request.json()['results'][0]['geometry']['location']['lng'])
except Exception as maps_e:
logging.error('error getting maps',maps_e)
# req_string +="&gkx={}".format() if False
# req_string +="&gky={}".format() if False
# req_string +="&lat={}".format() if False
# if False
# req_string +="&lon={}".format() if False
subject = ""
if 'Notfallgeschehen' in parsed_body:
@@ -88,23 +118,37 @@ def alarminator_api(parsed_body: dict):
req_string +="&subject={}".format(subject)
req_string +="&ils={}".format("ILS Saar")
req_string +="&connector={}".format("MailParser")
if os.environ.get('IS_DEV') and os.environ.get('IS_DEV') == "True":
req_string +="&isTest=1"
s.get(alarminator_api+"/operations/sendAlarm/"+req_string)
except Exception as e:
logging.error("alarminator_api", e)
def generate_pdf(html_body, filename):
f = "/tmp/{}.html".format(uuid.uuid4())
with open(f,"w") as _f:
_f.write(html_body)
subprocess.run(["/usr/bin/google-chrome-stable", "--headless", "--no-sandbox", "--disable-gpu", "--print-to-pdf="+filename, "--no-pdf-header-footer", "--print-to-pdf-no-header", "--no-margins", f])
if os.path.exists(f):
os.remove(f)
def cups_print(parsed_body: dict, body: str):
fname = "/tmp/{}.pdf".format(uuid.uuid4())
# if os.environ.get('IS_DEV') and os.environ.get('IS_DEV') == "True":
# generate_pdf(body, "{}.pdf".format(uuid.uuid4()))
fname = "{}.pdf".format(uuid.uuid4())
try:
conn = cups.Connection ()
printer = os.environ.get('printer',"DEFAULT")
printer_arr = os.environ.get('printer',"DEFAULT").split(";")
print_num = int(os.environ.get('print_num',0))
if 'ALARMDEPESCHE' in parsed_body:
with open(fname,"wb") as f:
f.write(HTML(string=body, base_url="").write_pdf())
for i in range(0, print_num):
conn.printFile (printer, fname, "Alarmfax", {})
os.remove(fname)
#if printer_arr.__len__() > 0:
generate_pdf(body, fname)
for printer in printer_arr:
print(printer)
if 'ALARMDEPESCHE' in parsed_body:
for i in range(0, print_num):
conn.printFile (printer, fname, "Alarmfax", {})
except Exception as e:
logging.error("cups_print", e)
finally:
if os.path.exists(fname):
os.remove(fname)
logging.error("cups_print", e)
os.remove(fname)
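For reference, the new Chrome-based PDF path can be exercised on its own. A minimal sketch, assuming this file is the hooks module that imap_connect.py imports from; the CUPS queue name "alarmdrucker" is an assumption, not a value from the diff.
import cups
from hooks import generate_pdf

# Render a trivial HTML body to PDF via headless Chrome, then hand it to CUPS.
generate_pdf("<h1>Testalarm</h1>", "/tmp/testalarm.pdf")
conn = cups.Connection()
conn.printFile("alarmdrucker", "/tmp/testalarm.pdf", "Alarmfax Testdruck", {})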

app/imap_connect.py Normal file (124 lines added)
View File

@@ -0,0 +1,124 @@
from datetime import timedelta, datetime
import os
import ssl
import email
import logging
from imapclient import IMAPClient
from threading import Thread
from securecad_parser import parse_securecad_message
from hooks import webhook, alarminator_api, cups_print
import re
from time import sleep
def run():
threads = {}
format = "%(asctime)s|%(threadName)s: %(message)s"
logging.basicConfig(format=format, level=logging.INFO,
datefmt="%Y-%m-%d %H:%M:%S")
def eventHandler(ELEMENT_NAME, uid, message_data, _server: IMAPClient = None):
email_message = email.message_from_bytes(message_data[b'RFC822'])
email_from: list[str] = re.findall(r'([\w\.-]+@[\w\.-]+)', email_message.get('From'))
flags = _server.get_flags(uid)
logging.info(ELEMENT_NAME + " - get Mail")
if 'Processed_{}'.format(parser_id).encode() in flags[uid]:
logging.info("Mail {} bereits verarbeitet.. ignoriere".format(uid))
if not IS_DEV:
return
else:
_server.add_flags(uid, ['\\SEEN','Processed_{}'.format(parser_id)])
logging.info("got Mail {} von {}".format(email_message.get('Subject'), email_from))
if any(mail in filter_from for mail in email_from):
# Get HTML body
html_body = ""
if email_message.is_multipart():
for part in email_message.walk():
if part.get_content_type() == 'text/html':
html_body = part.get_payload(decode=True).decode(part.get_content_charset() or 'utf-8', errors='replace')
break
else:
if email_message.get_content_type() == 'text/html':
html_body = email_message.get_payload(decode=True).decode(email_message.get_content_charset() or 'utf-8', errors='replace')
parsed_body = parse_securecad_message(html_body)
logging.debug(parsed_body)
if parsed_body != None:
if 'ALARMDEPESCHE' in parsed_body:
logging.info("Alarm für: {}".format(parsed_body['ALARMDEPESCHE']))
webhook(parsed_body)
alarminator_api(parsed_body)
cups_print(parsed_body,html_body)
pass
def folder_event_subscriber(folder: str):
logging.info('folder_event_subscriber startet for Folder: {}'.format(folder))
with IMAPClient(server, ssl_context=ssl_context) as _server:
_server.login(username, password)
while True:
# filtern des ordners nach mails der letzten 24h, die nicht verarbeitet wurden
now = datetime.now()
_server.select_folder(folder, readonly=False)
q = ['SENTSINCE', now - timedelta(days=1),'NOT','KEYWORD', 'Processed_{}'.format(parser_id)]
if IS_DEV:
q = ['SENTSINCE', now - timedelta(days=1),'UNSEEN']
q = ['UNSEEN']
messages = _server.search(q)
cnt = messages.__len__()
if cnt > 0:
logging.info("{} Mails nicht verarbeitet in den letzten 2 Tagen in ordner: {}".format(cnt, folder))
for uid, message_data in _server.fetch(messages, 'RFC822').items():
# IMAPClient ist nicht thread-safe, daher wird hier der _server übergeben und kein Thread verwendet.
eventHandler('SearchFolderEvent', uid, message_data, _server)
# t = Thread(target=eventHandler, args=('SearchFolderEvent',uid,message_data,),name="eventHandler: SearchFolderEvent ({})".format(uid))
# t.start()
# aktives warten auf streaming_events. maximal eine minute lang, dann wird nochmal der ordner durchsucht, falls mails angekommen sind während eines timeout/cooldown.
_server.idle()
try:
logging.debug("Idle check for folder: {}".format(folder))
messages = _server.idle_check(timeout=60) # Timeout after 60 seconds
_server.idle_done()
# In den events stehen nur vorhandenen nachrichten. Exists ist nicht die neue Nachricht, sondern eine bereits vorhandene.
# for item in messages:
# if item[1] == b'EXISTS':
# logging.info("New messages in folder: {}".format(folder))
# for uid, message_data in _server.fetch([item[0]], 'RFC822').items():
# if uid:
# eventHandler('NewMailEvent', uid, message_data, _server)
except Exception as e:
logging.error("Error during idle check: {}".format(e))
username = os.environ.get('username')
password = os.environ.get('password')
server = os.environ.get('server')
folders = os.environ.get('folders',"")
parser_id = os.environ.get('alarmfax_parser_id',"")
primary_smtp_address = os.environ.get('primary_smtp_address')
filter_from = os.environ.get('filter_from').split(";") if os.environ.get('filter_from') else []
IS_DEV = True if os.environ.get('IS_DEV') and os.environ.get('IS_DEV') == "True" else False
if IS_DEV:
logging.getLogger().setLevel(logging.INFO)
ssl_context = ssl.create_default_context()
with IMAPClient(server, ssl_context=ssl_context) as _server:
_server.login(username, password)
_server.logout()
folders_to_subscribe = []
for f in folders.split(";"):
if f == "":
folders_to_subscribe.append('INBOX')
else:
folders_to_subscribe.append(f)
while True:
for f in folders_to_subscribe:
if not f in threads or not threads[f].is_alive():
logging.info("folder_event_subscriber for folder \"{}\" not alive, starting".format(f))
t = Thread(target=folder_event_subscriber, args=(f,), daemon=True, name="folder_event_subscriber {}".format(f))
threads[f] = t
t.start()
sleep(1)
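The deduplication in this file rests on a custom IMAP keyword flag rather than a database. A minimal sketch of that pattern with imapclient; host, credentials and the keyword suffix are placeholders, in the service they come from the environment variables.
from imapclient import IMAPClient

with IMAPClient("imap.example.org") as client:
    client.login("user", "secret")
    client.select_folder("INBOX", readonly=False)
    # Only messages that do not yet carry the parser's keyword flag.
    for uid in client.search(["NOT", "KEYWORD", "Processed_parser1"]):
        # ... process the message, then flag it so later searches skip it.
        client.add_flags(uid, ["\\Seen", "Processed_parser1"])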

app/run.py Normal file (10 lines added)
View File

@@ -0,0 +1,10 @@
import os
from imap_connect import run as run_imap
from exchange_connect import run as run_ews
mode = os.environ.get('mode')
if mode == 'IMAP':
run_imap()
elif mode == 'EWS':
run_ews()
else:
raise ValueError("Invalid mode specified. Use 'IMAP' or 'EWS'.")
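Note that the dispatch runs at module import time, so mode has to be set before run.py is loaded, which the Dockerfile and docker-compose environment blocks take care of. A minimal sketch of a local test run; the value is only an example, and the selected run() then blocks in its event loop.
import os

os.environ["mode"] = "IMAP"
import run  # dispatches immediately and blocks in run_imap()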

View File

@@ -6,6 +6,7 @@ services:
restart: always
privileged: true
environment:
- mode=${mode}
- username=${username}
- password=${password}
- server=${server}

requirements.txt
View File

@@ -28,4 +28,5 @@ urllib3==1.26.14
xmltodict==0.12.0
xmltojson==2.0.1
pycups==2.0.1
weasyprint
weasyprint
imapclient==3.0.1