This commit is contained in:
Colin Goutte 2021-10-19 18:35:32 +02:00
parent 1d38cbfd9c
commit 72f5c92f3e
6 changed files with 158 additions and 275 deletions

View File

@ -1,8 +1,8 @@
[probe]
identitifant_sonde = 838266b2-fc3a-4430-95e8-f7f0d0fc9871
identifiant_sonde = 838266b2-fc3a-4430-95e8-f7f0d0fc9871
nom_sonde = SondeTest
emails = colin.goutte@free.fr
colin@cgoutte.fr
emails = 1@1
2@2

View File

@ -1,12 +1,29 @@
import os
import glob
import configparser
def read_config(*, pattern="*"):
    """Load probe configuration dicts from confs/<pattern>.ini files.

    Only files whose name starts with "api_client" are considered (the
    original test was `"/api_client" in relpath`, which silently matched
    nothing on Windows where glob yields backslash-separated paths).

    Each returned dict is the [probe] section of one file, with:
      * "emails" split on whitespace into a list of addresses;
      * "debug_source_path" set to the absolute path of the source file.

    Returns a (possibly empty) list of dicts, one per matching file.
    """
    paths = [
        os.path.abspath(relpath)
        for relpath in glob.glob(f"confs/{pattern}.ini")
        # Filter on the file name itself so the check is path-separator
        # agnostic; equivalent to the old "/api_client" substring test.
        if os.path.basename(relpath).startswith("api_client")
    ]
    res = []
    for path in paths:
        conf = configparser.ConfigParser()
        conf.read(path)
        probe_settings = dict(conf["probe"])
        # str.split() with no argument already discards surrounding
        # whitespace, so no per-item strip() is needed.
        probe_settings["emails"] = probe_settings["emails"].split()
        probe_settings["debug_source_path"] = path
        res.append(probe_settings)
    return res
def apply_config(db, settings: list[dict]):
    """Apply parsed probe settings to the database.

    Placeholder: not implemented yet; kept so callers can already
    import and wire it up.  Always returns None.
    """

View File

@ -1,5 +1,7 @@
from typing import Optional, List
from configparser import ConfigParser
import contextlib
import logging
from pydantic import BaseModel
@ -16,6 +18,7 @@ from papi.sqlapp import crud
from papi.sqlapp import schemas
from papi.config import read_config
app = FastAPI()
templates = Jinja2Templates(directory="templates/")
@ -29,6 +32,17 @@ Base.metadata.create_all(bind=engine)
conf = ConfigParser()
conf.read("conf_notifications.ini")
configurations = read_config(pattern="*api_client*sonde*")
logger = logging.getLogger()
console = logging.StreamHandler()
console.setLevel(logging.INFO)
logger.setLevel(logging.INFO)
logger.addHandler(console)
PROBES = {}
def sondeid2notifsemails(idsonde, mapping=conf):
try:
@ -50,6 +64,32 @@ def get_db(): # pragma: no cover
db.close()
@contextlib.contextmanager
def atomic_db():
    """Yield a database session and guarantee it is closed afterwards.

    Context-manager counterpart of the FastAPI `get_db` dependency, for
    use outside request handling (e.g. start-up configuration).
    """
    # (removed the commented-out `yield from get_db()` experiment)
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def apply_config(configs: Optional[list[dict]] = None):
    """Ensure a DB row exists for every configured probe and cache its config.

    configs: parsed probe configurations (see papi.config.read_config).
        The default is None instead of the original mutable default `[]`
        (a single list object shared across all calls); None means "do
        nothing", which is behaviorally identical.

    Side effects: creates missing `sonde` rows and fills the module-level
    PROBES cache keyed by probe identifier.
    """
    if configs is None:
        configs = []
    with atomic_db() as db:
        for config in configs:
            identifiant = config["identifiant_sonde"]
            sonde = crud.get_sonde(db, identifiant)
            if sonde is None:
                sonde = crud.create_sonde(db, identifiant, config["nom_sonde"])
                logger.info(f"Create sonde {sonde.__dict__}")
            else:
                logger.info(f"{sonde.identifiant}: {sonde.nom} already exists")
            PROBES[sonde.identifiant] = config
# Populate the DB and the PROBES cache from the on-disk configuration
# at import time, before the app starts serving requests.
apply_config(configurations)
class Notifier:
@staticmethod
def __call__(idsonde, changes):

94
papi/relais.py Normal file
View File

@ -0,0 +1,94 @@
import requests
import urllib3
import sys
from .config import read_config
import logging as logger
import json
import math
import itertools
# Relay-wide HTTP state: a single shared session for every API call.
https = False  # NOTE(review): never read in this module; presumably leftover -- confirm before removing
urllib3.disable_warnings()  # silence InsecureRequestWarning triggered by session.verify = False below
session = requests.Session()
session.verify = False  # TLS verification off -- presumably a self-signed probe endpoint; TODO confirm
logged = []  # truthy (non-empty) once api_login() has run; checked by api_fetch()
def api_login(config):
    """POST credentials to the API login endpoint of this relay's probe.

    config: probe configuration dict; must contain "api_login_url".
    Returns the requests.Response so the caller can inspect it.
    """
    # `api_credentials` stopped being imported when this code moved out
    # of lever.py (which had this exact try/except import) -- without it
    # the original body raised NameError on first call.
    try:
        from . import api_credentials
    except ImportError:  # pragma: no cover - script-style execution
        import api_credentials
    login_r = session.post(
        config["api_login_url"],
        json={"login": api_credentials.user, "password": api_credentials.password},
    )
    if login_r.ok:
        # Only record success: the original appended unconditionally, so
        # a failed login was never retried by api_fetch().
        logged.append(True)
        logger.info("Logged")
    else:  # pragma: no cover
        logger.info("Login error")
    return login_r
def api_fetch(config):
    """GET the current probe status, logging in first when needed.

    Returns the raw requests.Response.
    """
    if not logged:
        # Original called api_login() with no argument -> TypeError on
        # the very first fetch.
        api_login(config)
    # Original referenced a module global `apiurl` that no longer exists.
    # In lever.py: apiurl = https base, api_login_url == apiurl + "/user/login",
    # so recover the base the same way.  TODO(review): confirm whether the
    # config should instead carry a dedicated "api_status_url" key.
    api_base = config["api_login_url"].rsplit("/user/login", 1)[0]
    return session.get("%s/status" % api_base)
def api_forward(config):
    """POST a fetched status payload to the papi forwarding endpoint(s).

    NOTE(review): main() calls this as forward(current), so the single
    parameter actually receives the *payload*, not a configuration dict
    -- confirm and rename on a later pass.
    """
    data = config
    # The original body was an unfinished stub: `post_url = ""` followed
    # by `assert post_url` (always failing) and two undefined names
    # (forward_urls, data).  Endpoint restored from lever.py's forward().
    forward_urls = [
        "https://papi.silib.re/sonde/test/",
    ]
    for post_url in forward_urls:
        res = session.post(post_url, json=data)
        print(res.ok)
        print(res.json())
def forward_api_error(message=""):
    """Report a relay-side error message to every papi error endpoint.

    message: human-readable description; an empty string is replaced by
    the default "Erreur inconnue".
    """
    # Removed the commented-out alternative endpoints and the dead
    # trailing `pass` statement; behavior is unchanged.
    forward_urls = [
        "http://localhost:8000/sonde/838266b2-fc3a-4430-95e8-f7f0d0fc9871/error/",
    ]
    message = message or "Erreur inconnue"
    for post_url in forward_urls:
        res = session.post(post_url, json={"msg": message})
        print(post_url)
        print(res.ok)
        print(res.json())
def main(
    *,
    maxloop=math.inf,
    login=api_login,
    fetch=api_fetch,
    forward=api_forward,
    forward_error=forward_api_error,
    pattern="relais_*",
):
    """Relay loop: log in once, then repeatedly fetch status and forward it.

    maxloop: number of polling iterations (math.inf = run forever).
    login/fetch/forward/forward_error: injectable callables for testing.
    pattern: config-file glob pattern handed to read_config().

    Raises RuntimeError when the pattern does not match exactly one
    configuration file (the original used `assert`, which vanishes
    under `python -O`).
    """
    loopcount = itertools.count().__next__
    configs = read_config(pattern=pattern)
    if len(configs) != 1:
        raise RuntimeError(
            f"expected exactly one config for pattern {pattern!r}, got {len(configs)}"
        )
    config = configs[0]
    login(config)
    while loopcount() < maxloop:
        try:
            current = fetch(config).json()
        except Exception as exc:  # broad on purpose: keep the relay alive
            forward_error(str(exc))
        else:
            forward(current)
if __name__ == "__main__":
    main(maxloop=1)  # single poll/forward cycle when run as a script

View File

@ -1,260 +0,0 @@
import requests
import sys
from time import sleep
from copy import deepcopy
try:
from . import api_credentials
except ImportError:
import api_credentials
import urllib3
import math
import itertools
import datetime
import csv
import json
import logging as logger
import json
# Module-wide HTTP state shared by every API helper below.
https = False  # NOTE(review): never read in this file; presumably leftover
urllib3.disable_warnings()  # silence InsecureRequestWarning from session.verify = False
session = requests.Session()
session.verify = False
url = "https://%s" % api_credentials.ip
apiurl = "%s/api/1.1" % url  # REST base used by login() and fetch()
logged = []  # truthy (non-empty) once login() has been attempted
def login():
    """POST the stored credentials and return the login response.

    Also records the attempt in the module-level `logged` flag list,
    which fetch() consults before each request.
    """
    response = session.post(
        "%s/user/login" % apiurl,
        json={"login": api_credentials.user, "password": api_credentials.password},
    )
    logged.append(True)
    if not response.ok:  # pragma: no cover
        logger.info("Login error")
    else:
        logger.info("Logged")
    return response
def forward(data):
    """POST *data* as JSON to every configured papi endpoint."""
    forward_urls = [
        "https://papi.silib.re/sonde/test/",
    ]
    for target in forward_urls:
        response = session.post(target, json=data)
        print(response.ok)
        print(response.json())
def status2list(status: dict):
    """Flatten a status payload into one dict per channel, sorted by id.

    Each row carries the payload-level "date" plus the channel's id,
    name, type (renamed from "channelType") and status.
    """
    wanted = "id name channelType status".split(" ")
    rename = {"channelType": "type"}
    date = status["date"]
    rows = [
        {"date": date, **{rename.get(field, field): channel[field] for field in wanted}}
        for channel in status["channels"].values()
    ]
    rows.sort(key=lambda row: row["id"])
    return rows
def strip_channel(channel, keys=None):
    """
    Project *channel* onto *keys* (default: "id", "name", "status").

    >>> strip_channel({'alarms': {'noSignal': True}, 'channelType': 'fm',
    ...                'id': 6, 'lastTimestamp': None,
    ...                'name': 'AES-67', 'status': 'error'})
    {'id': 6, 'name': 'AES-67', 'status': 'error'}
    """
    wanted = keys if keys is not None else ("id", "name", "status")
    return {field: channel[field] for field in wanted}
def prepare(record):
    """Deep-copy *record*, replacing its channel list with a dict keyed "id#name"."""
    out = deepcopy(record)
    pairs = (channel2tuple(ch) for ch in record["channels"])
    out["channels"] = dict(sorted(pairs))
    return out
def fetch():
    """GET the current status; performs a login first when none is recorded."""
    if not logged:
        login()
    return session.get("%s/status" % apiurl)
def channel2tuple(channel):
    """Return ("<id>#<name>", channel) so channels can be keyed and sorted stably."""
    return "%s#%s" % (channel["id"], channel["name"]), channel
# (removed a stray module-level `pass` that followed this function)
def get_status(channels, key):
    """Status string for *key* in a prepared record; "absent" when missing."""
    channel = channels["channels"].get(key, {"status": "absent"})
    return channel["status"]
def compare(channels, previous, current):
    """Diff per-channel statuses between two prepared records.

    channels: iterable of channel keys to inspect.
    Returns {} when no status changed; otherwise a dict with
    "changements" (changed triples), "etats" (every triple) and,
    when non-empty, "disparus"/"apparus" (channels whose current /
    previous status is "absent").  Each triple is
    (key, previous_status, current_status).
    """
    states = [
        (key, get_status(previous, key), get_status(current, key))
        for key in channels
    ]
    changes = [triple for triple in states if triple[1] != triple[2]]
    if not changes:
        return {}
    report = {}
    gone = [triple for triple in states if triple[2] == "absent"]
    if gone:
        report["disparus"] = gone
    appeared = [triple for triple in states if triple[1] == "absent"]
    if appeared:
        report["apparus"] = appeared
    report["changements"] = changes
    report["etats"] = states
    return report
def load_or_fetch(fetch=fetch):
    """Return the previous status from last.json, fetching and caching it
    when the file is missing or corrupt."""
    try:
        with open("last.json") as cache:
            return json.load(cache)
    except (FileNotFoundError, json.decoder.JSONDecodeError):
        print("Nolast")
        with open("last.json", "w") as cache:
            previous = fetch().json()
            json.dump(previous, cache)
        return previous
def list_channels(p, c):
    """Sorted union of channel keys appearing in either prepared record."""
    return sorted({*p["channels"], *c["channels"]})
def main(*, maxloop=math.inf, login=login, fetch=fetch):
    """Poll the API up to *maxloop* times, forwarding each status payload.

    login/fetch are injectable for tests.  Returns None.
    """
    loopcount = itertools.count().__next__
    login()
    # NOTE(review): kept for call-order parity; no longer used below
    # since the diff/compare logic was disabled.
    previous = load_or_fetch()
    historique = []
    while loopcount() < maxloop:
        try:
            current = fetch().json()
        except json.decoder.JSONDecodeError:
            # Was breakpoint(): debugger residue that hangs unattended
            # runs, then fell through to forward(current) with `current`
            # possibly undefined (NameError on the first iteration).
            logger.info("JSON decode error from API; skipping this poll")
            continue
        forward(current)
    # (removed the dead triple-quoted string literal that contained the
    # disabled compare/save logic -- commented-out code, never executed)
def make_id_key(channel, keys=None, sep="#", tuple_=False):
    """
    Build a "<id>#<name>"-style compound key for *channel*.

    Returns {key: channel} by default, or the pair (key, channel) when
    tuple_ is true.  keys selects which fields join the key (default
    ["id", "name"]); sep is the joining separator.

    >>> sample = {'id': 6, 'name': 'foo'}
    >>> make_id_key(sample)
    {'6#foo': {'id': 6, 'name': 'foo'}}
    """
    fields = keys or ["id", "name"]
    compound = sep.join(str(channel[field]) for field in fields)
    if tuple_:
        return compound, channel
    return {compound: channel}
def raw_filename():
    """Dated raw-dump filename, e.g. "raw_2021_10_19.json"."""
    stamp = str(datetime.date.today()).replace("-", "_")
    return "raw_" + stamp + ".json"
def log_filename():
    """Dated CSV log filename, e.g. "log_2021_10_19.csv"."""
    stamp = str(datetime.date.today()).replace("-", "_")
    return "log_" + stamp + ".csv"
def diff_filename():
    """Dated CSV diff filename, e.g. "diff_2021_10_19.csv"."""
    stamp = str(datetime.date.today()).replace("-", "_")
    return "diff_" + stamp + ".csv"
def savelog2csv(alert, *, filename_f=log_filename):
    """Append one CSV row per channel of *alert* to today's log file.

    alert: a status payload accepted by status2list().
    filename_f: zero-arg callable returning the target path (injectable
        for tests).  The header row is written only when the file is empty.
    """
    keys = "date id name type status".split(" ")
    # newline="" is required by the csv module when writing files;
    # without it rows are doubled-spaced on Windows.
    with open(filename_f(), "a", newline="") as f:
        writer = csv.DictWriter(f, keys)
        if f.tell() == 0:
            writer.writeheader()
        for row in status2list(alert):
            writer.writerow(row)
def savediff(date, diff, *, filename=diff_filename):
    """Append (name, before, after) change triples to today's diff CSV.

    date: stamp written on every row.
    diff: iterable of (name, before, after) triples -- NOTE(review):
        callers that pass compare()'s dict directly would iterate its
        string keys; confirm the intended input shape.
    filename: zero-arg callable returning the target path.  The header
        row is written only when the file is empty.
    """
    keys = "date name before after".split(" ")
    # newline="" per the csv-module docs; prevents doubled newlines on Windows.
    with open(filename(), "a", newline="") as f:
        writer = csv.DictWriter(f, keys)
        if f.tell() == 0:
            writer.writeheader()
        for d in diff:
            row = {"date": date}
            row.update(zip(("name", "before", "after"), d))
            writer.writerow(row)
if __name__ == "__main__":
    main(maxloop=1)  # single poll/forward cycle when executed directly

View File

@ -207,14 +207,6 @@ def test_notif_rendering():
assert f"{idsonde}@fqdn" in content.text.lower()
class ConfigurationTestCase(TestCase):
    """ Check configuration parsing """
    def test_get_config(self):
        # read_config() with defaults is expected to find exactly one
        # matching file in the test environment.
        config = main.read_config()
        assert len(config) == 1
class CodeCoverageTestCase(TestCase):
""" Get covergage to 100% """