sqlExport: Use both CSV and json transaction sources to get more data

This commit is contained in:
Adam Goldsmith 2020-02-29 10:58:27 -05:00
parent 65b79cad99
commit b417821703
2 changed files with 31 additions and 7 deletions

View File

@@ -110,7 +110,16 @@ try:
insertLabels(members)
print("Getting/Updating transactions...")
transactions = membershipworks.get_transactions(datetime(2020, 1, 1), datetime.now())
now = datetime.now()
transactions_csv = membershipworks.get_transactions(datetime(2020, 1, 1), now)
transactions_json = membershipworks.get_transactions(
datetime(2020, 1, 1), now, json=True)
# this is terrible, but as long as the dates are the same, should be fiiiine
transactions = [{**j, **v}
for j, v in zip(transactions_csv, transactions_json)]
assert all([t['Name'] == t['nam'] and t['Account ID'] == t.get('uid', '')
and t['Payment ID'] == t.get('sid', '')
for t in transactions])
insertFromTableMap('transactions', transactions, tableMapping['transactions'])
print("Committing changes...")

View File

@@ -69,13 +69,28 @@ members:
'IP Address':
transactions:
'sid': {type: CHAR(27) PRIMARY KEY}
'sid': {type: CHAR(27)}
'uid': {type: CHAR(24)}
'timestamp': {type: 'INT(11)', source: '_dp'} # TODO: should be a real timestamp?
'type': {type: INTEGER, source: 'typ'} # transaction type
'currency': {source: 'cur'}
# TODO: this is a terrible PK
'timestamp': {type: 'INT(11) PRIMARY KEY', source: '_dp'} # TODO: should be a real timestamp?
'type': {source: 'Transaction Type'}
'sum': {type: 'DECIMAL(13,4)'}
'fee': {type: 'DECIMAL(13,4)'}
'name': {source: 'nam'}
'event_id': {source: 'eid'}
'ttl': # 'For'
'For':
'Items':
'Discount Code':
'Note':
# this is painful, but necessary because some users have no uid
# TODO: fix this horribleness
'Name':
'Contact Person':
'Full Address':
'Street':
'City':
'State/Province':
'Postal Code':
'Country':
'Phone':
'Email':