WIP: improve events retrieval and add some statistics
parent 25532bf21b
commit 450bee6a0d
105
events.py
@@ -1,16 +1,23 @@
#!/usr/bin/env python3
from collections import defaultdict
from lxml import etree
import os
import datetime
import itertools
import re
import requests

from common import *
import numpy as np
import pandas as pd

import requests
from lxml import etree

from common import doors
from hid.DoorController import ROOT, E


def getStrings(door):
    """Parses out the message strings from source."""
    r = requests.get('https://' + door.ip + '/html/en_EN/en_EN.js',
                     auth=requests.auth.HTTPDigestAuth(door.username, door.password),
                     auth=requests.auth.HTTPDigestAuth(door.username,
                                                       door.password),
                     verify=False)
    regex = re.compile(r'([0-9]+)="([^"]*)')
    strings = [regex.search(s) for s in r.text.split(';')
@@ -18,46 +25,39 @@ def getStrings(door):
    print({int(g.group(1)): g.group(2) for g in strings})

def getMessages(door):
    # get parameters for messages to get?
    # honestly not really sure why this is required, their API is confusing
    parXMLIn = E_plain.VertXMessage(
        E.EventMessages({"action": "LR"}))
    parXMLOut = door.doXMLRequest(parXMLIn)
    etree.dump(parXMLOut)
    events = None
    recordCount = 0
    moreRecords = True

    if os.path.exists("logs/" + door.name + ".xml"):
        # read last log
        tree = etree.ElementTree(file="logs/" + door.name + ".xml")
        root = tree.getroot()
        recordCount = int(parXMLOut[0].attrib["historyRecordMarker"]) - \
            int(root[0][0].attrib["recordMarker"])
    else:
        # first run for this door
        root = None
        recordCount = 1000
    while moreRecords:
        res = door.doXMLRequest(ROOT(
            E.EventMessages({
                "action": "LR",
                "recordCount": str(1000 - recordCount),
                "recordOffset": str(recordCount),
            })))
        if events is None:
            events = res[0]
        else:
            for event in res[0]:
                events.append(event)

    if recordCount == 0:
        print("No records to get!")
        return
    print("Getting", recordCount, "records")
    # get the actual messages
    eventsXMLIn = E_plain.VertXMessage(
        E.EventMessages({"action": "LR",
                         "recordCount": str(recordCount),
                         "historyRecordMarker": parXMLOut[0].attrib["historyRecordMarker"],
                         "historyTimestamp": parXMLOut[0].attrib["historyTimestamp"]}))
    eventsXMLOut = door.doXMLRequest(eventsXMLIn)
    # TODO: handle modeRecords=true
        recordCount += int(res[0].get('recordCount'))
        moreRecords = res[0].get('moreRecords') == 'true'

    for index, event in enumerate(eventsXMLOut[0]):
        event.attrib["recordMarker"] = str(int(parXMLOut[0].attrib["historyRecordMarker"]) - index)
    print(recordCount, moreRecords)

    if root is None:
        tree = etree.ElementTree(eventsXMLOut)
    else:
        for event in reversed(eventsXMLOut[0]):
            root[0].insert(0, event)
    tree.write("logs/" + door.name + ".xml")
    etree.dump(events, pretty_print=True)

    return events

# def stats(events):
#     eventsByDay = {k: list(v) for k, v in
#                    itertools.groupby(sorted(events, key=get_day), key=get_day)}

#     print({k: len(v) for k, v in eventsByDay.items()})

#     #print([get_day(e) for e in events])

def main():
    for door in doors.values():
@@ -65,3 +65,24 @@ def main():

if __name__ == '__main__':
    main()

    events = getMessages(doors["Studio Space"])

    # stats(events)

    df = pd.DataFrame([dict(e.attrib) for e in events])
    idx = pd.to_datetime(df['timestamp'], format='%Y-%m-%dT%H:%M:%S')
    df = df.set_index(pd.DatetimeIndex(idx.values)).drop('timestamp', axis=1)
    print()
    print(df[df.eventType == '2020'].dropna(axis=1, how='all').head())

    entriesPerDay = df[df.eventType == '2020'] \
        .dropna(axis=1, how='all') \
        .resample('1D') \
        .count()['eventType']

    entriesPerDay.index = entriesPerDay.index.map(lambda t: t.strftime('%Y-%m-%d'))

    print(df.groupby(by=['forename', 'surname']).size().sort_values())

    entriesPerDay.plot(kind='bar')
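
For context, the retrieval rework in getMessages() above pages through the controller's event log in fixed-size chunks, appending each response's events and stopping once the reply no longer reports moreRecords. The sketch below shows only that loop shape; fetch_page is a hypothetical stand-in for door.doXMLRequest() with the ROOT/E message builders from hid.DoorController, which are not part of this diff.

# Sketch of the paging loop: keep requesting chunks and collecting events
# until the source stops reporting more records.
def fetch_all_events(fetch_page, page_size=1000):
    events = []
    offset = 0
    more = True
    while more:
        page = fetch_page(record_count=page_size, record_offset=offset)
        events.extend(page["events"])
        offset += len(page["events"])
        more = page["moreRecords"]
    return events


# Fake backend serving 25 records in pages of 10, just to exercise the loop.
def fake_page(record_count, record_offset):
    log = list(range(25))
    chunk = log[record_offset:record_offset + record_count]
    return {"events": chunk, "moreRecords": record_offset + len(chunk) < len(log)}


print(len(fetch_all_events(fake_page, page_size=10)))  # -> 25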
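
The statistics block added at the end of the file builds a timestamp-indexed DataFrame from the event attributes and counts eventType 2020 entries per day. A self-contained version of that step follows, using made-up sample records in place of real controller events; the attribute names mirror those used above, but the values are invented for illustration.

import pandas as pd

# Made-up stand-ins for dict(e.attrib) on each event element; real records
# come from getMessages() and carry more attributes.
sample_events = [
    {"timestamp": "2017-03-01T09:15:00", "eventType": "2020", "forename": "Ann", "surname": "Example"},
    {"timestamp": "2017-03-01T18:02:00", "eventType": "2020", "forename": "Bob", "surname": "Example"},
    {"timestamp": "2017-03-02T08:47:00", "eventType": "2020", "forename": "Ann", "surname": "Example"},
]

df = pd.DataFrame(sample_events)
idx = pd.to_datetime(df['timestamp'], format='%Y-%m-%dT%H:%M:%S')
df = df.set_index(pd.DatetimeIndex(idx.values)).drop('timestamp', axis=1)

# Entries per calendar day, mirroring the resample('1D').count() above.
entriesPerDay = df[df.eventType == '2020'].resample('1D').count()['eventType']
print(entriesPerDay)   # 2017-03-01 -> 2, 2017-03-02 -> 1

# Entries per person, mirroring the groupby above.
print(df.groupby(by=['forename', 'surname']).size().sort_values())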