collect.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
COLLECTOR:
----------
Script that wraps the complete application logic.
Here we query the WFP API and collect all available
data, storing the results in local CSV files.
'''

import scraperwiki

from collector.parser import parse
from collector.classes.mvam import mVAM
from collector.utilities.item import item
from collector.storage.csv import store_csv
from collector.storage.sqlite import store_sqlite

def main():
    '''
    Program wrapper.
    '''
    tables = ['pblStatsSum', 'pblStatsSum4Maps']
    for t in tables:

        # Query the mVAM API table and parse every record it returns.
        m = mVAM(table=t)
        output = []
        records = m.query()
        for record in records:
            output.append(parse(record))

        # Persist the parsed records both as a CSV file and in SQLite.
        store_csv(data=output, path='%s.csv' % t)
        store_sqlite(data=output, table=t)


if __name__ == '__main__':
    try:
        main()
        print('%s Successfully collected mVAM data.' % item('success'))
        scraperwiki.status('ok')

    except Exception as e:
        print('%s Failed to collect mVAM data.' % item('error'))
        print(e)
        scraperwiki.status('error', 'Failed to collect data.')
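
For context, here is a minimal sketch of what the two storage helpers imported above might look like. This is an assumption based only on their call sites (store_csv(data=..., path=...) and store_sqlite(data=..., table=...)) and on parse presumably returning flat dicts; the actual implementations live under collector/storage/ and may differ.

# Hypothetical sketch only, not the repository's actual code.
import csv
import scraperwiki

def store_csv(data, path):
    '''Write a list of flat dicts to a CSV file at `path`.'''
    if not data:
        return
    fieldnames = sorted(data[0].keys())
    with open(path, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(data)

def store_sqlite(data, table):
    '''Save a list of flat dicts into the local ScraperWiki SQLite database.'''
    # scraperwiki.sqlite.save() is the library's standard persistence call;
    # passing no unique keys simply appends the records.
    scraperwiki.sqlite.save(unique_keys=[], data=data, table_name=table)

The scraperwiki.sqlite.save() call shown is the documented API of the scraperwiki Python library; whether collector/storage/sqlite.py actually wraps it this way is an assumption.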