import logging
from collections.abc import Iterable
from datetime import datetime, timedelta

from django.conf import settings
from django.db import transaction
from django.db.models import QuerySet

from cmsmanage.django_q2_helper import q_task_group
from membershipworks.membershipworks_api import FieldType, MembershipWorks
from membershipworks.models import (
    Event,
    EventCategory,
    EventExt,
    Flag,
    Member,
    Transaction,
)

logger = logging.getLogger(__name__)

MAX_MEETING_TIME = timedelta(hours=6)


def flags_for_member(csv_member, all_flags, folders):
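    """Yield each flag in all_flags that applies to csv_member.

    Folder flags are matched by looking up the member's account ID in the
    pre-fetched folder membership map; any other flag is matched by a CSV
    column named after the flag whose value is the flag name.
    """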
    for flag in all_flags:
        if flag.type == "folder":
            if csv_member["Account ID"] in folders[flag.id]:
                yield flag
        elif csv_member[flag.name] == flag.name:
            yield flag


def update_flags(mw_flags) -> Iterable[Flag]:
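    """Save a Flag row for every flag reported by MembershipWorks and yield it.

    mw_flags maps a plural flag type (e.g. "labels") to a dict of flag name
    to flag id; the stored type is the singular form of that key.
    """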
    for typ, flags_of_type in mw_flags.items():
        for name, id in flags_of_type.items():
            flag = Flag(id=id, name=name, type=typ[:-1])
            flag.save()
            yield flag


def scrape_members(membershipworks: MembershipWorks):
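    """Create or update local Member records from MembershipWorks and assign their flags."""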
    logger.info("Updating flags (labels, levels, and addons)")
    flags = list(update_flags(membershipworks._parse_flags()))

    logger.info("Getting folder membership")
    folders = {
        folder_id: membershipworks.get_member_ids([folder_name])
        for folder_name, folder_id in membershipworks._parse_flags()["folders"].items()
    }

    logger.info("Getting/Updating members...")
    members = membershipworks.get_all_members()
    for csv_member in members:
        for field in membershipworks._all_fields().values():
            # convert checkboxes to real booleans
            if (
                field.get("typ") == FieldType.CHECKBOX.value
                and field["lbl"] in csv_member
            ):
                csv_member[field["lbl"]] = csv_member[field["lbl"]] == "Y"

        # create/update member
        member = Member.from_api_dict(csv_member)
        member.clean_fields()
        member.save()
        member.flags.set(flags_for_member(csv_member, flags, folders))


def scrape_transactions(membershipworks: MembershipWorks):
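    """Fetch and store transactions newer than the most recent one on record."""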
    now = datetime.now()
    start_date = datetime(2010, 1, 1)
    last_transaction = Transaction.objects.order_by("timestamp").last()
    if last_transaction is not None:
        # technically this has the potential to lose
        # transactions, but it should be incredibly unlikely
        start_date = last_transaction.timestamp + timedelta(seconds=1)

    logger.info(f"Getting/Updating transactions since {start_date}...")

    transactions_csv = membershipworks.get_transactions(start_date, now)
    transactions_json = membershipworks.get_transactions(start_date, now, json=True)
    # this is terrible, but as long as the dates are the same, should be fiiiine
    transactions = [{**j, **v} for j, v in zip(transactions_csv, transactions_json)]
    assert all(
        t["Account ID"] == t.get("uid", "") and t["Payment ID"] == t.get("sid", "")
        for t in transactions
    )

    for csv_transaction in transactions:
        Transaction.from_api_dict(csv_transaction).save()


@q_task_group("Scrape MembershipWorks Data")
@transaction.atomic
def scrape_membershipworks(*args, **options):
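    """Log in to MembershipWorks and scrape members and transactions in one atomic transaction."""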
    membershipworks = MembershipWorks()
    membershipworks.login(
        settings.MEMBERSHIPWORKS_USERNAME, settings.MEMBERSHIPWORKS_PASSWORD
    )

    scrape_members(membershipworks)
    scrape_transactions(membershipworks)


def scrape_event_details(queryset: QuerySet[EventExt]):
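    """Refresh details and registrations for each event in the queryset."""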
    membershipworks = MembershipWorks()
    membershipworks.login(
        settings.MEMBERSHIPWORKS_USERNAME, settings.MEMBERSHIPWORKS_PASSWORD
    )

    for event in queryset:
        event.details = membershipworks.get_event_by_eid(event.eid)
        event.registrations = membershipworks.get_event_registrations(event.eid)
        event.save()


@q_task_group("Scrape MembershipWorks Events")
def scrape_events():
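    """Scrape the MembershipWorks events list into local Event/EventExt records.

    Upserts event categories and events, gives short events a default meeting
    time, and fetches details and registrations for events whose details are
    missing or were last retrieved before the event ended.
    """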
    membershipworks = MembershipWorks()
    membershipworks.login(
        settings.MEMBERSHIPWORKS_USERNAME, settings.MEMBERSHIPWORKS_PASSWORD
    )

    data = membershipworks.get_events_list(
        datetime.fromtimestamp(0), datetime.now() + timedelta(weeks=52), categories=True
    )
logger.info(f"{len(data)} events retrieved!")
|
|
|
|
|
|
|
|
    for category_id, category_data in enumerate(data["_st"]["evg"]):
        category = EventCategory.from_api_dict(category_id, category_data)
        category.clean_fields()
        category.save()

    events = Event.objects.bulk_create(
        [Event.from_api_dict(event_data) for event_data in data["evt"]],
        update_conflicts=True,
        update_fields=[
            field.attname
            for field in Event._meta.get_fields()
            if not (
                field.auto_created
                or field.many_to_many
                or not field.concrete
                or field.generated
                or field.primary_key
            )
        ],
    )

    for event in events:
        try:
            event_ext = EventExt.objects.get(event_ptr=event)
        except EventExt.DoesNotExist:
            event_ext = EventExt(event_ptr=event)
            # create extension model instance
            event_ext.save_base(raw=True)
            event_ext.refresh_from_db()

        if (
            event_ext.end is not None
            and event_ext.end - event_ext.start < MAX_MEETING_TIME
        ):
            meeting_times_count = event_ext.meeting_times.count()
            if meeting_times_count == 0:
                event_ext.meeting_times.create(start=event_ext.start, end=event_ext.end)
            # if there is exactly one meeting time, it should match the event start/end
            elif meeting_times_count == 1:
                event_ext.meeting_times.update(start=event_ext.start, end=event_ext.end)

        # event has no details, or last retrieval was before the event happened
        if event_ext.details is None or event_ext.details_timestamp < (
            event_ext.end or event_ext.start
        ):
            event_ext.details = membershipworks.get_event_by_eid(event.eid)
            event_ext.registrations = membershipworks.get_event_registrations(event.eid)
            event_ext.save()