Manage IMAP/SMTP directly; remove the srmail dependency

pull/3/head
Yax 5 years ago
parent 09f6e1a250
commit b6da0405fc

@ -1,3 +1,4 @@
;
; Default configuration
[main]
lang = fr
@ -5,18 +6,24 @@ db_url = sqlite:///db.sqlite
newcomment_polling = 60
[http]
root_url = http://localhost:8100
host = 0.0.0.0
host = 127.0.0.1
port = 8100
[security]
salt = BRRJRqXgGpXWrgTidBPcixIThHpDuKc0
secret = Uqca5Kc8xuU6THz9
[rss]
proto = http
proto = https
file = comments.xml
[mail]
fetch_polling = 30
mailer_url = http://localhost:8000
[imap]
polling = 120
host = mail.gandi.net
ssl = false
port = 993
login = blog@mydomain.com
password = MYPASSWORD
[smtp]
host = mail.gandi.net
starttls = true
port = 587
login = blog@mydomain.com
password = MYPASSWORD

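The new [imap] and [smtp] sections replace the old [mail] block. As a quick sanity check, a minimal sketch can read the new keys back; it uses the standard-library configparser rather than the project's profig-based conf.config, and the file path is a hypothetical example.

import configparser

cfg = configparser.ConfigParser()
cfg.read('conf/stacosys.ini')                      # hypothetical path to the file above
print(cfg.get('imap', 'host'), cfg.getint('imap', 'port'))
print(cfg.getboolean('imap', 'ssl'))               # false in the default configuration
print(cfg.get('smtp', 'host'), cfg.getint('smtp', 'port'))
print(cfg.getboolean('smtp', 'starttls'))          # true: STARTTLS on port 587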
poetry.lock (generated)

@ -363,6 +363,17 @@ urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"]
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
[[package]]
category = "dev"
description = "a python refactoring library..."
name = "rope"
optional = false
python-versions = "*"
version = "0.16.0"
[package.extras]
dev = ["pytest"]
[[package]]
category = "main"
description = "Python 2 and 3 compatibility utilities"
@ -449,7 +460,7 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pathlib2", "contextlib2", "unittest2"]
[metadata]
content-hash = "6270dbd1455ca926e89cdd874748cf8cee18c2ef5a10a05b6dc7f7100e2482d5"
content-hash = "d698fc06cf58f4d228449cb76f48e8d739c323abd535427746d70dbbcaa4924c"
python-versions = "^3.7"
[metadata.files]
@ -618,6 +629,11 @@ requests = [
{file = "requests-2.22.0-py2.py3-none-any.whl", hash = "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"},
{file = "requests-2.22.0.tar.gz", hash = "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"},
]
rope = [
{file = "rope-0.16.0-py2-none-any.whl", hash = "sha256:ae1fa2fd56f64f4cc9be46493ce54bed0dd12dee03980c61a4393d89d84029ad"},
{file = "rope-0.16.0-py3-none-any.whl", hash = "sha256:52423a7eebb5306a6d63bdc91a7c657db51ac9babfb8341c9a1440831ecf3203"},
{file = "rope-0.16.0.tar.gz", hash = "sha256:d2830142c2e046f5fc26a022fe680675b6f48f81c7fc1f03a950706e746e9dfe"},
]
six = [
{file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"},
{file = "six-1.13.0.tar.gz", hash = "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"},

@ -19,6 +19,7 @@ requests = "^2.22.0"
[tool.poetry.dev-dependencies]
pytest = "^5.2"
black = {version = "^19.10b0", allow-prereleases = true}
rope = "^0.16.0"
[build-system]
requires = ["poetry>=0.12"]

@ -4,23 +4,30 @@
import profig
# constants
FLASK_APP = "flask.app"
FLASK_APP = 'flask.app'
DB_URL = "main.db_url"
LANG = "main.lang"
DB_URL = 'main.db_url'
LANG = 'main.lang'
COMMENT_POLLING = 'main.newcomment_polling'
HTTP_HOST = "http.host"
HTTP_PORT = "http.port"
HTTP_HOST = 'http.host'
HTTP_PORT = 'http.port'
SECURITY_SALT = "security.salt"
SECURITY_SECRET = "security.secret"
RSS_PROTO = 'rss.proto'
RSS_FILE = 'rss.file'
RSS_PROTO = "rss.proto"
RSS_FILE = "rss.file"
IMAP_POLLING = 'imap.polling'
IMAP_SSL = 'imap.ssl'
IMAP_HOST = 'imap.host'
IMAP_PORT = 'imap.port'
IMAP_LOGIN = 'imap.login'
IMAP_PASSWORD = 'imap.password'
MAIL_POLLING = "mail.fetch_polling"
COMMENT_POLLING = "main.newcomment_polling"
MAILER_URL = "mail.mailer_url"
SMTP_STARTTLS = 'smtp.starttls'
SMTP_HOST = 'smtp.host'
SMTP_PORT = 'smtp.port'
SMTP_LOGIN = 'smtp.login'
SMTP_PASSWORD = 'smtp.password'
# variable
@ -38,16 +45,12 @@ def get(key):
return params[key]
def getInt(key):
def get_int(key):
return int(params[key])
def _str2bool(v):
return v.lower() in ("yes", "true", "t", "1")
def getBool(key):
return _str2bool(params[key])
def get_bool(key):
return params[key].lower() in ('yes', 'true', '1')
def flaskapp():

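Note that the rewritten get_bool only accepts 'yes', 'true' and '1' (case-insensitive); the old helper's 't' shorthand is gone. A self-contained illustration of the new truthy set, not the module itself:

def parse_bool(value: str) -> bool:
    # mirrors conf.config.get_bool after this change
    return value.lower() in ('yes', 'true', '1')

assert parse_bool('Yes') and parse_bool('TRUE') and parse_bool('1')
assert not parse_bool('t') and not parse_bool('no')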
@ -2,21 +2,21 @@
# -*- coding: utf-8 -*-
import logging
from datetime import datetime
import time
import re
from core import mailer
import time
from datetime import datetime
from core import mailer, rss
from core.templater import get_template
from core import rss
from model.comment import Comment
from model.comment import Site
from model.comment import Comment, Site
from model.email import Email
logger = logging.getLogger(__name__)
def cron(func):
def wrapper():
logger.debug("execute CRON " + func.__name__)
logger.debug('execute CRON ' + func.__name__)
func()
return wrapper
@ -26,10 +26,10 @@ def cron(func):
def fetch_mail_answers():
for msg in mailer.fetch():
if re.search(r".*STACOSYS.*\[(\d+)\:(\w+)\]", msg["subject"], re.DOTALL):
full_msg = mailer.get(msg["id"])
if full_msg and reply_comment_email(full_msg['email']):
mailer.delete(msg["id"])
if re.search(r'.*STACOSYS.*\[(\d+)\:(\w+)\]', msg.subject, re.DOTALL):
if _reply_comment_email(msg):
mailer.delete(msg.id)
@cron
def submit_new_comment():
@ -37,42 +37,36 @@ def submit_new_comment():
for comment in Comment.select().where(Comment.notified.is_null()):
comment_list = (
"author: %s" % comment.author_name,
"site: %s" % comment.author_site,
"date: %s" % comment.created,
"url: %s" % comment.url,
"",
"%s" % comment.content,
"",
'author: %s' % comment.author_name,
'site: %s' % comment.author_site,
'date: %s' % comment.created,
'url: %s' % comment.url,
'',
'%s' % comment.content,
'',
)
comment_text = "\n".join(comment_list)
email_body = get_template("new_comment").render(
comment_text = '\n'.join(comment_list)
email_body = get_template('new_comment').render(
url=comment.url, comment=comment_text
)
# send email
site = Site.get(Site.id == comment.site)
subject = "STACOSYS %s: [%d:%s]" % (site.name, comment.id, site.token)
mailer.send(site.admin_email, subject, email_body)
logger.debug("new comment processed ")
# notify site admin and save notification datetime
comment.notify_site_admin()
subject = 'STACOSYS %s: [%d:%s]' % (site.name, comment.id, site.token)
if mailer.send(site.admin_email, subject, email_body):
logger.debug('new comment processed')
# notify site admin and save notification datetime
comment.notify_site_admin()
else:
logger.warn('rescheduled. send mail failure ' + subject)
def reply_comment_email(data):
from_email = data["from"]
subject = data["subject"]
message = ""
for part in data["parts"]:
if part["content-type"] == "text/plain":
message = part["content"]
break
def _reply_comment_email(email):
m = re.search(r"\[(\d+)\:(\w+)\]", subject)
m = re.search(r'\[(\d+)\:(\w+)\]', email.subject)
if not m:
logger.warn("ignore corrupted email. No token %s" % subject)
logger.warn('ignore corrupted email. No token %s' % email.subject)
return
comment_id = int(m.group(1))
token = m.group(2)
@ -81,37 +75,39 @@ def reply_comment_email(data):
try:
comment = Comment.select().where(Comment.id == comment_id).get()
except:
logger.warn("unknown comment %d" % comment_id)
logger.warn('unknown comment %d' % comment_id)
return True
if comment.published:
logger.warn("ignore already published email. token %d" % comment_id)
logger.warn('ignore already published email. token %d' % comment_id)
return
if comment.site.token != token:
logger.warn("ignore corrupted email. Unknown token %d" % comment_id)
logger.warn('ignore corrupted email. Unknown token %d' % comment_id)
return
if not message:
logger.warn("ignore empty email")
if not email.content:
logger.warn('ignore empty email')
return
# safe logic: no answer or unknown answer is a go for publishing
if message[:2].upper() in ("NO"):
logger.info("discard comment: %d" % comment_id)
if email.content[:2].upper() in ('NO',):
logger.info('discard comment: %d' % comment_id)
comment.delete_instance()
email_body = get_template("drop_comment").render(original=message)
mailer.send(from_email, "Re: " + subject, email_body)
new_email_body = get_template('drop_comment').render(original=email.content)
if not mailer.send(email.from_addr, 'Re: ' + email.subject, new_email_body):
logger.warn('minor failure. cannot send rejection mail ' + email.subject)
else:
# save publishing datetime
comment.publish()
logger.info("commit comment: %d" % comment_id)
logger.info('commit comment: %d' % comment_id)
# rebuild RSS
rss.generate_site(token)
# send approval confirmation email to admin
email_body = get_template("approve_comment").render(original=message)
mailer.send(from_email, "Re: " + subject, email_body)
new_email_body = get_template('approve_comment').render(original=email.content)
if not mailer.send(email.from_addr, 'Re: ' + email.subject, new_email_body):
logger.warn('minor failure. cannot send approval email ' + email.subject)
return True
return True

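For reference, the token regex in _reply_comment_email matches the subject line produced by submit_new_comment ('STACOSYS <site>: [<comment_id>:<token>]'). A small sketch with made-up values:

import re

subject = 'STACOSYS myblog: [42:abc123]'        # hypothetical site name and token
m = re.search(r'\[(\d+)\:(\w+)\]', subject)
assert m is not None
comment_id, token = int(m.group(1)), m.group(2)
print(comment_id, token)                        # 42 abc123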
@ -1,9 +1,10 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from conf import config
from playhouse.db_url import connect
from conf import config
def get_db():
return connect(config.get(config.DB_URL))

@ -0,0 +1,152 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import base64
import datetime
import email
import imaplib
import logging
import re
filename_re = re.compile('filename="(.+)"|filename=([^;\n\r"\']+)', re.I | re.S)
class Mailbox(object):
def __init__(self, host, port, ssl, login, password):
self.logger = logging.getLogger(__name__)
self.host = host
self.port = port
self.ssl = ssl
self.login = login
self.password = password
def __enter__(self):
if self.ssl:
self.imap = imaplib.IMAP4_SSL(self.host, self.port)
else:
self.imap = imaplib.IMAP4(self.host, self.port)
self.imap.login(self.login, self.password)
return self
def __exit__(self, type, value, traceback):
self.imap.close()
self.imap.logout()
def get_count(self):
self.imap.select('Inbox')
_, data = self.imap.search(None, 'ALL')
return sum(1 for num in data[0].split())
def fetch_raw_message(self, num):
self.imap.select('Inbox')
_, data = self.imap.fetch(str(num), '(RFC822)')
email_msg = email.message_from_bytes(data[0][1])
return email_msg
def fetch_message(self, num):
raw_msg = self.fetch_raw_message(num)
msg = {}
msg['encoding'] = 'UTF-8'
msg['index'] = num
dt = parse_date(raw_msg['Date']).strftime('%Y-%m-%d %H:%M:%S')
msg['datetime'] = dt
msg['from'] = raw_msg['From']
msg['to'] = raw_msg['To']
subject = email_nonascii_to_uft8(raw_msg['Subject'])
msg['subject'] = subject
parts = []
attachments = []
for part in raw_msg.walk():
if part.is_multipart():
continue
content_disposition = part.get('Content-Disposition', None)
if content_disposition:
# we have attachment
r = filename_re.findall(content_disposition)
if r:
filename = sorted(r[0])[1]
else:
filename = 'undefined'
content = base64.b64encode(part.get_payload(decode=True))
content = content.decode()
a = {
'filename': email_nonascii_to_uft8(filename),
'content': content,
'content-type': part.get_content_type(),
}
attachments.append(a)
else:
part_item = {}
content = part.get_payload(decode=True)
content_type = part.get_content_type()
try:
charset = part.get_param('charset', None)
if charset:
content = to_utf8(content, charset)
elif type(content) == bytes:
content = content.decode('utf8')
except:
self.logger.exception('failed to decode message part')
# RFC 3676: remove automatic word-wrapping
content = content.replace(' \r\n', ' ')
part_item['content'] = content
part_item['content-type'] = content_type
parts.append(part_item)
if parts:
msg['parts'] = parts
if attachments:
msg['attachments'] = attachments
return msg
def delete_message(self, num):
self.imap.select('Inbox')
self.imap.store(str(num), '+FLAGS', r'\Deleted')
self.imap.expunge()
def delete_all(self):
self.imap.select('Inbox')
_, data = self.imap.search(None, 'ALL')
for num in data[0].split():
self.imap.store(num, '+FLAGS', r'\Deleted')
self.imap.expunge()
def print_msgs(self):
self.imap.select('Inbox')
_, data = self.imap.search(None, 'ALL')
for num in reversed(data[0].split()):
status, data = self.imap.fetch(num, '(RFC822)')
self.logger.debug('Message %s\n%s\n' % (num, data[0][1]))
def parse_date(v):
if v is None:
return datetime.datetime.now()
tt = email.utils.parsedate_tz(v)
if tt is None:
return datetime.datetime.now()
timestamp = email.utils.mktime_tz(tt)
date = datetime.datetime.fromtimestamp(timestamp)
return date
def to_utf8(string, charset):
return string.decode(charset).encode('UTF-8').decode('UTF-8')
def email_nonascii_to_uft8(string):
# RFC 1342 is a recommendation that provides a way to represent non-ASCII
# characters inside e-mail in a way that won't confuse e-mail servers
subject = ''
for v, charset in email.header.decode_header(string):
if charset is None:
if type(v) is bytes:
v = v.decode()
subject = subject + v
else:
subject = subject + to_utf8(v, charset)
return subject

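A minimal usage sketch of the new Mailbox context manager (host and credentials are placeholders); fetch_message expects 1-based message numbers, as core.mailer.fetch shows:

from core.imap import Mailbox

with Mailbox('mail.example.com', 993, True, 'blog@mydomain.com', 'MYPASSWORD') as mbox:
    for num in range(1, mbox.get_count() + 1):
        msg = mbox.fetch_message(num)
        print(num, msg['from'], msg['subject'], msg['datetime'])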
@ -1,43 +1,85 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import json
import logging
import smtplib
from email.mime.text import MIMEText
import requests
from conf import config
from core import imap
from model.email import Email
logger = logging.getLogger(__name__)
def fetch():
mails = []
r = requests.get(config.get(config.MAILER_URL) + "/mbox")
if r.status_code == 200:
payload = r.json()
if payload["count"] > 0:
mails = payload["emails"]
return mails
def _open_mailbox():
return imap.Mailbox(
config.get(config.IMAP_HOST),
config.get_int(config.IMAP_PORT),
config.get_bool(config.IMAP_SSL),
config.get(config.IMAP_LOGIN),
config.get(config.IMAP_PASSWORD),
)
def get(id):
payload = None
r = requests.get(config.get(config.MAILER_URL) + "/mbox/" + str(id))
if r.status_code == 200:
payload = r.json()
return payload
def _to_dto(msg):
content = 'no plain-text part found in email'
for part in msg['parts']:
if part['content-type'] == 'text/plain':
content = part['content']
break
return Email(
id=msg['index'],
encoding=msg['encoding'],
date=msg['datetime'],
from_addr=msg['from'],
to_addr=msg['to'],
subject=msg['subject'],
content=content,
)
def fetch():
msgs = []
try:
with _open_mailbox() as mbox:
count = mbox.get_count()
for num in range(count):
msg = _to_dto(mbox.fetch_message(num + 1))
msgs.append(msg)
except:
logger.exception('fetch mail exception')
return msgs
def send(to_email, subject, message):
headers = {"Content-Type": "application/json; charset=utf-8"}
msg = {"to": to_email, "subject": subject, "content": message}
r = requests.post(
config.get(config.MAILER_URL) + "/mbox", data=json.dumps(msg), headers=headers
)
if r.status_code in (200, 201):
logger.debug("Email for %s posted" % to_email)
else:
logger.warn("Cannot post email for %s" % to_email)
# Create the container (outer) email message.
msg = MIMEText(message)
msg['Subject'] = subject
msg['To'] = to_email
msg['From'] = config.get(config.SMTP_LOGIN)
success = True
try:
s = smtplib.SMTP(config.get(config.SMTP_HOST), config.get_int(config.SMTP_PORT))
if config.get_bool(config.SMTP_STARTTLS):
s.starttls()
s.login(config.get(config.SMTP_LOGIN), config.get(config.SMTP_PASSWORD))
s.send_message(msg)
s.quit()
except:
logger.exception('send mail exception')
success = False
return success
def delete(id):
requests.delete(config.get(config.MAILER_URL) + "/mbox/" + str(id))
try:
with _open_mailbox() as mbox:
mbox.delete_message(id)
except:
logger.exception('delete mail exception')

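With the srmail HTTP calls gone, the mailer module talks to IMAP for fetch/delete and to SMTP for send. A sketch of the new flow, assuming conf.config has already been initialized elsewhere:

from core import mailer

for email in mailer.fetch():               # model.email.Email named tuples
    print(email.id, email.from_addr, email.subject)

if mailer.send('admin@mydomain.com', 'test', 'hello from stacosys'):
    print('sent over SMTP')
else:
    print('send failed, see the logged exception')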
@ -2,15 +2,18 @@
# -*- coding: UTF-8 -*-
from datetime import datetime
import markdown
import PyRSS2Gen
from model.site import Site
from model.comment import Comment
from core.templater import get_template
from conf import config
from core.templater import get_template
from model.comment import Comment
from model.site import Site
def generate_all():
for site in Site.select():
generate_site(site.token)
@ -18,7 +21,7 @@ def generate_all():
def generate_site(token):
site = Site.select().where(Site.token == token).get()
rss_title = get_template("rss_title_message").render(site=site.name)
rss_title = get_template('rss_title_message').render(site=site.name)
md = markdown.Markdown()
items = []
@ -29,24 +32,23 @@ def generate_site(token):
.order_by(-Comment.published)
.limit(10)
):
item_link = "%s://%s%s" % (config.get(config.RSS_PROTO), site.url, row.url)
item_link = '%s://%s%s' % (config.get(config.RSS_PROTO), site.url, row.url)
items.append(
PyRSS2Gen.RSSItem(
title="%s - %s://%s%s"
title='%s - %s://%s%s'
% (config.get(config.RSS_PROTO), row.author_name, site.url, row.url),
link=item_link,
description=md.convert(row.content),
guid=PyRSS2Gen.Guid("%s/%d" % (item_link, row.id)),
guid=PyRSS2Gen.Guid('%s/%d' % (item_link, row.id)),
pubDate=row.published,
)
)
rss = PyRSS2Gen.RSS2(
title=rss_title,
link="%s://%s" % (config.get(config.RSS_PROTO), site.url),
description="Commentaires du site '%s'" % site.name,
link='%s://%s' % (config.get(config.RSS_PROTO), site.url),
description='Commentaires du site "%s"' % site.name,
lastBuildDate=datetime.now(),
items=items,
)
rss.write_xml(open(config.get(config.RSS_FILE), "w"), encoding="utf-8")
rss.write_xml(open(config.get(config.RSS_FILE), 'w'), encoding='utf-8')

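The RSS module itself is only reformatted; a hypothetical call site, as cron uses it after approving a comment:

from core import rss

rss.generate_site('MY_SITE_TOKEN')    # rebuild one site's feed (placeholder token)
rss.generate_all()                    # or rebuild every site, as done at startup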
@ -2,14 +2,15 @@
# -*- coding: utf-8 -*-
import os
from jinja2 import Environment
from jinja2 import FileSystemLoader
from jinja2 import Environment, FileSystemLoader
from conf import config
current_path = os.path.dirname(__file__)
template_path = os.path.abspath(os.path.join(current_path, "../templates"))
template_path = os.path.abspath(os.path.join(current_path, '../templates'))
env = Environment(loader=FileSystemLoader(template_path))
def get_template(name):
return env.get_template(config.get(config.LANG) + "/" + name + ".tpl")
return env.get_template(config.get(config.LANG) + '/' + name + '.tpl')

@ -1,16 +0,0 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import hashlib
from conf import config
def salt(value):
string = "%s%s" % (value, config.get(config.SECURITY_SALT))
dk = hashlib.sha256(string.encode())
return dk.hexdigest()
def md5(value):
dk = hashlib.md5(value.encode())
return dk.hexdigest()

@ -2,29 +2,31 @@
# -*- coding: utf-8 -*-
import logging
from flask import request, jsonify, abort
from model.site import Site
from model.comment import Comment
from flask import abort, jsonify, request
from conf import config
from model.comment import Comment
from model.site import Site
logger = logging.getLogger(__name__)
app = config.flaskapp()
@app.route("/ping", methods=["GET"])
@app.route('/ping', methods=['GET'])
def ping():
return "OK"
return 'OK'
@app.route("/comments", methods=["GET"])
@app.route('/comments', methods=['GET'])
def query_comments():
comments = []
try:
token = request.args.get("token", "")
url = request.args.get("url", "")
token = request.args.get('token', '')
url = request.args.get('url', '')
logger.info("retrieve comments for token %s, url %s" % (token, url))
logger.info('retrieve comments for url %s' % (url))
for comment in (
Comment.select(Comment)
.join(Site)
@ -35,29 +37,30 @@ def query_comments():
)
.order_by(+Comment.published)
):
d = {}
d["author"] = comment.author_name
d["content"] = comment.content
d = {
'author': comment.author_name,
'content': comment.content,
'avatar': comment.author_gravatar,
'date': comment.published.strftime('%Y-%m-%d %H:%M:%S')
}
if comment.author_site:
d["site"] = comment.author_site
d["avatar"] = comment.author_gravatar
d["date"] = comment.published.strftime("%Y-%m-%d %H:%M:%S")
d['site'] = comment.author_site
logger.debug(d)
comments.append(d)
r = jsonify({"data": comments})
r = jsonify({'data': comments})
r.status_code = 200
except:
logger.warn("bad request")
r = jsonify({"data": []})
logger.warn('bad request')
r = jsonify({'data': []})
r.status_code = 400
return r
@app.route("/comments/count", methods=["GET"])
@app.route('/comments/count', methods=['GET'])
def get_comments_count():
try:
token = request.args.get("token", "")
url = request.args.get("url", "")
token = request.args.get('token', '')
url = request.args.get('url', '')
count = (
Comment.select(Comment)
.join(Site)
@ -68,9 +71,9 @@ def get_comments_count():
)
.count()
)
r = jsonify({"count": count})
r = jsonify({'count': count})
r.status_code = 200
except:
r = jsonify({"count": 0})
r = jsonify({'count': 0})
r.status_code = 200
return r

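The comment API is unchanged apart from quoting and logging; a hypothetical client query against the default root_url:

import requests

r = requests.get('http://localhost:8100/comments',
                 params={'token': 'MY_SITE_TOKEN', 'url': '/posts/hello.html'})
print(r.status_code, r.json().get('data', []))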
@ -3,52 +3,53 @@
import logging
from datetime import datetime
from flask import request, abort, redirect
from model.site import Site
from model.comment import Comment
from flask import abort, redirect, request
from conf import config
from helper.hashing import md5
from model.comment import Comment
from model.site import Site
logger = logging.getLogger(__name__)
app = config.flaskapp()
@app.route("/newcomment", methods=["POST"])
@app.route('/newcomment', methods=['POST'])
def new_form_comment():
try:
data = request.form
logger.info("form data " + str(data))
logger.info('form data ' + str(data))
# validate token: retrieve site entity
token = data.get("token", "")
token = data.get('token', '')
site = Site.select().where(Site.token == token).get()
if site is None:
logger.warn("Unknown site %s" % token)
logger.warn('Unknown site %s' % token)
abort(400)
# honeypot for spammers
captcha = data.get("remarque", "")
captcha = data.get('remarque', '')
if captcha:
logger.warn("discard spam: data %s" % data)
logger.warn('discard spam: data %s' % data)
abort(400)
url = data.get("url", "")
author_name = data.get("author", "").strip()
author_gravatar = data.get("email", "").strip()
author_site = data.get("site", "").lower().strip()
if author_site and author_site[:4] != "http":
author_site = "http://" + author_site
message = data.get("message", "")
url = data.get('url', '')
author_name = data.get('author', '').strip()
author_gravatar = data.get('email', '').strip()
author_site = data.get('site', '').lower().strip()
if author_site and author_site[:4] != 'http':
author_site = 'http://' + author_site
message = data.get('message', '')
# anti-spam again
if not url or not author_name or not message:
logger.warn("empty field: data %s" % data)
logger.warn('empty field: data %s' % data)
abort(400)
check_form_data(data)
# add a row to Comment table
created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
created = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
comment = Comment(
site=site,
url=url,
@ -63,18 +64,18 @@ def new_form_comment():
comment.save()
except:
logger.exception("new comment failure")
logger.exception('new comment failure')
abort(400)
return redirect("/redirect/", code=302)
return redirect('/redirect/', code=302)
def check_form_data(data):
fields = ["url", "message", "site", "remarque", "author", "token", "email"]
fields = ['url', 'message', 'site', 'remarque', 'author', 'token', 'email']
d = data.to_dict()
for field in fields:
if field in d:
del d[field]
if d:
logger.warn("additional field: data %s" % data)
logger.warn('additional field: data %s' % data)
abort(400)

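A hypothetical form submission matching the fields checked by new_form_comment; any extra field, or a non-empty remarque honeypot, gets the request rejected with 400:

import requests

r = requests.post('http://localhost:8100/newcomment', data={
    'token': 'MY_SITE_TOKEN',
    'url': '/posts/hello.html',
    'author': 'Alice',
    'email': 'alice@example.com',   # only used to build the gravatar
    'site': 'example.com',
    'message': 'Nice post!',
    'remarque': '',                 # honeypot: must stay empty
}, allow_redirects=False)
print(r.status_code)                # 302 redirect to /redirect/ on success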
@ -17,18 +17,18 @@ class Comment(Model):
notified = DateTimeField(null=True, default=None)
published = DateTimeField(null=True, default=None)
author_name = CharField()
author_site = CharField(default="")
author_gravatar = CharField(default="")
author_site = CharField(default='')
author_gravatar = CharField(default='')
content = TextField()
site = ForeignKeyField(Site, related_name="site")
site = ForeignKeyField(Site, related_name='site')
class Meta:
database = get_db()
def notify_site_admin(self):
self.notified = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.notified = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.save()
def publish(self):
self.published = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.published = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.save()

@ -0,0 +1,14 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from typing import NamedTuple
from datetime import datetime
class Email(NamedTuple):
id: int
encoding: str
date: datetime
from_addr: str
to_addr: str
subject: str
content: str

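The new Email named tuple is the DTO that core.mailer._to_dto builds from an IMAP message; constructing one by hand for a test might look like:

from datetime import datetime
from model.email import Email

e = Email(id=1, encoding='UTF-8', date=datetime.now(),
          from_addr='reader@example.com', to_addr='blog@mydomain.com',
          subject='Re: STACOSYS myblog: [42:abc123]', content='NO')
print(e.subject, e.content)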
@ -2,12 +2,15 @@
# -*- coding: UTF-8 -*-
import argparse
import os
import logging
import os
from flask import Flask
from flask_apscheduler import APScheduler
from conf import config
# configure logging
def configure_logging(level):
root_logger = logging.getLogger()
@ -15,7 +18,7 @@ def configure_logging(level):
ch = logging.StreamHandler()
ch.setLevel(level)
# create formatter
formatter = logging.Formatter("[%(asctime)s] %(name)s %(levelname)s %(message)s")
formatter = logging.Formatter('[%(asctime)s] %(name)s %(levelname)s %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
@ -26,21 +29,21 @@ class JobConfig(object):
JOBS = []
SCHEDULER_EXECUTORS = {"default": {"type": "threadpool", "max_workers": 20}}
SCHEDULER_EXECUTORS = {'default': {'type': 'threadpool', 'max_workers': 4}}
def __init__(self, mail_polling_seconds, new_comment_polling_seconds):
def __init__(self, imap_polling_seconds, new_comment_polling_seconds):
self.JOBS = [
{
"id": "fetch_mail",
"func": "core.cron:fetch_mail_answers",
"trigger": "interval",
"seconds": mail_polling_seconds,
'id': 'fetch_mail',
'func': 'core.cron:fetch_mail_answers',
'trigger': 'interval',
'seconds': imap_polling_seconds,
},
{
"id": "submit_new_comment",
"func": "core.cron:submit_new_comment",
"trigger": "interval",
"seconds": new_comment_polling_seconds,
'id': 'submit_new_comment',
'func': 'core.cron:submit_new_comment',
'trigger': 'interval',
'seconds': new_comment_polling_seconds,
},
]
@ -53,8 +56,8 @@ def stacosys_server(config_pathname):
# configure logging
logger = logging.getLogger(__name__)
configure_logging(logging.INFO)
logging.getLogger("werkzeug").level = logging.WARNING
logging.getLogger("apscheduler.executors").level = logging.WARNING
logging.getLogger('werkzeug').level = logging.WARNING
logging.getLogger('apscheduler.executors').level = logging.WARNING
# initialize database
from core import database
@ -64,14 +67,14 @@ def stacosys_server(config_pathname):
# cron email fetcher
app.config.from_object(
JobConfig(
config.getInt(config.MAIL_POLLING), config.getInt(config.COMMENT_POLLING)
config.get_int(config.IMAP_POLLING), config.get_int(config.COMMENT_POLLING)
)
)
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()
logger.info("Start Stacosys application")
logger.info('Start Stacosys application')
# generate RSS for all sites
from core import rss
@ -80,10 +83,12 @@ def stacosys_server(config_pathname):
# start Flask
from interface import api
logger.info('Load interface %s' % api)
from interface import form
logger.debug("Load interface %s" % api)
logger.debug("Load interface %s" % form)
logger.info('Load interface %s' % form)
app.run(
host=config.get(config.HTTP_HOST),
@ -93,8 +98,8 @@ def stacosys_server(config_pathname):
)
if __name__ == "__main__":
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("config", help="config path name")
parser.add_argument('config', help='config path name')
args = parser.parse_args()
stacosys_server(args.config)
