some usable data in db
commit b00a1945bf
parent 895ab5de04

Binary file not shown.
@@ -31,6 +31,7 @@ def csvToJSON():
 '''
 function to copy over values of AM & PM + Schedule from previous row, reading from Atlas.json, writing to atlasCopied.json
 (fill in blank rows where 'copy from previous' is assumed, and create new json file - step 2)
+NOTE: this omits routes for which we don't have a routeAlias to code mapping in routeMapping.json
 '''
 def processJSON():
     routeErrors = {'routes': [], 'others': []}
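The docstring above describes step 2 of the pipeline: carry AM/PM and Schedule values forward from the previous row of Atlas.json, write the result to atlasCopied.json, and skip routes whose alias has no code in routeMapping.json. The commit only touches the docstring, so the following is a minimal Python 3 sketch of that fill-forward behaviour; the column names and the dict-shaped rows are assumptions, since the real script works on positional CSV rows.

import json

FILL_FORWARD = ("AM", "PM", "Schedule")  # hypothetical column names

def fill_from_previous(rows, route_mapping):
    # Carry blank AM/PM/Schedule cells forward from the previous row and
    # drop rows whose routeAlias has no entry in route_mapping.
    out, prev = [], {}
    for row in rows:
        if row.get("routeAlias") not in route_mapping:
            continue  # omitted, as the NOTE in the docstring says
        for col in FILL_FORWARD:
            if not row.get(col):
                row[col] = prev.get(col, "")
        out.append(row)
        prev = row
    return out

if __name__ == "__main__":
    with open("Atlas.json") as f:
        rows = json.load(f)
    with open("routeMapping.json") as f:
        mapping = json.load(f)
    with open("atlasCopied.json", "w") as f:
        json.dump(fill_from_previous(rows, mapping), f, indent=2)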
@@ -83,7 +84,6 @@ def groupUnique():

         outDict[key] = []
         for row in routes[key]:
-            i = 0
             print key
             d = {
                 'from': row[7],
@@ -91,20 +91,21 @@ def groupUnique():
                 'span': row[13],
                 'is_full': False,
                 # 'schedule': row[28],
-                'rows': {
-                    row[-5]: row
-                }
+                # 'rows': {
+                #     row[-5]: row
+                # }
             }
             matchedRow = isNotUnique(d, outDict[key])
             schedule = row[-5]
-            if matchedRow:
-                outDict[key][i-1]['rows'][schedule] = row
+            if matchedRow is not None:
+                outDict[key][matchedRow]['rows'][schedule] = row
             else:
                 if isLargestSpan(d, routes[key]):
                     d['is_full'] = True
                 outDict[key].append(d)
-                outDict[key][i]['rows'][schedule] = row
-                i += 1
+                if not outDict[key][-1].has_key('rows'):
+                    outDict[key][-1]['rows'] = {}
+                outDict[key][-1]['rows'][schedule] = row

     outFile = open(join(PROJECT_ROOT, "../db_csv_files/uniqueRoutes.json"), "w")
     outFile.write(json.dumps(outDict, indent=2))
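This hunk replaces the hand-maintained index i with the index returned by isNotUnique, and creates each entry's 'rows' dict lazily instead of seeding it inside the dict literal. As a rough Python 3 sketch of where the loop ends up (not the commit's code: print key and has_key are Python 2, the 'to' column index is not visible in the hunk, and the isLargestSpan check is left out for brevity):

def is_not_unique(d, entries):
    # Return the index of an entry with the same from/to stops, else None.
    for i, e in enumerate(entries):
        if e['from'] == d['from'] and e['to'] == d['to']:
            return i
    return None

def group_unique(routes):
    out = {}
    for key, rows in routes.items():
        out[key] = []
        for row in rows:
            d = {'from': row[7], 'to': row[8],   # row[8] is a placeholder index
                 'span': row[13], 'is_full': False}
            schedule = row[-5]
            match = is_not_unique(d, out[key])
            if match is not None:
                out[key][match]['rows'][schedule] = row
            else:
                out[key].append(d)
                out[key][-1].setdefault('rows', {})[schedule] = row
    return out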
@@ -251,7 +252,7 @@ def isLargestSpan(data, arr):
     return True


 '''
-returns index of row if not unique, else False
+returns index of row if not unique, else None
 '''
 def isNotUnique(data, arr):
     i = 0
@@ -259,7 +260,7 @@ def isNotUnique(data, arr):
         if a['from'] == data['from'] and a['to'] == data['to']:
             return i
         i += 1
-    return False
+    return None


 '''
 Create routeMapping.json file to map route aliases to route codes
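The return False to return None change (together with the is not None test in groupUnique) fixes a subtle bug: when the matching entry sits at index 0, the returned index is falsy, so `if matchedRow:` treats a real match as a miss. A tiny stand-alone Python 3 demonstration, not taken from the commit:

def find_false(items, target):
    for i, x in enumerate(items):
        if x == target:
            return i
    return False

def find_none(items, target):
    for i, x in enumerate(items):
        if x == target:
            return i
    return None

items = ['10A', '10B']
print(bool(find_false(items, '10A')))        # False: a match at index 0 looks like "no match"
print(find_none(items, '10A') is not None)   # True: index 0 is still recognised as a match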
File diff suppressed because it is too large