|
|
@@ -305,8 +305,8 @@ def new_publish(args, publish_type, vehicle, language, version): |
|
|
|
full_update_file_name = full_update_path + vehicle + '/' + \ |
|
|
|
'%s-%s-full_v%s.json' % (vehicle, language, version) |
|
|
|
|
|
|
|
new_full_update_file_name = full_update_path + vehicle + '/' + \ |
|
|
|
'%s-%s-full_update.json' % (vehicle, language) |
|
|
|
# new_full_update_file_name = full_update_path + vehicle + '/' + \ |
|
|
|
# '%s-%s-full_update.json' % (vehicle, language) |
|
|
|
""" Since full update is not available for Global version 1 """ |
|
|
|
|
|
|
|
if str(version) == '1.00': |
|
|
@@ -437,8 +437,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
for old_sub_system in old_system['Subsystems']: |
|
|
|
if old_sub_system['sub_systems'] == subsystem['sub_systems']: |
|
|
|
for old_pd in old_sub_system['Procedure_details']: |
|
|
|
proc_det_name.add( |
|
|
|
old_pd['kilometer_name']) |
|
|
|
# proc_det_name.add( |
|
|
|
# old_pd['kilometer_name']) |
|
|
|
if old_pd['procedure_name'] not in proc_det_added and \ |
|
|
|
old_pd['procedure_name'] in proc_det_name: |
|
|
|
sub_system_list.append( |
|
|
@@ -506,7 +506,7 @@ def merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
"active_status": system['active_status'], |
|
|
|
"Assets": system_asset_list, |
|
|
|
"Subsystems": sub_system_list, |
|
|
|
"Config Kilometer": conf_km_list, |
|
|
|
# "Config Kilometer": conf_km_list, |
|
|
|
}) |
|
|
|
system_added.add(system['system_name']) |
|
|
|
except: |
|
|
@@ -544,9 +544,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
data_consolidated['JSON'].append({'Variant': variant_old['Variant'], |
|
|
|
}) |
|
|
|
|
|
|
|
json_object = json.dumps(data_consolidated) |
|
|
|
with open(out_file_path, "w") as outfile: |
|
|
|
outfile.write(json_object) |
|
|
|
outfile.write(json.dumps(data_consolidated,indent =4)) |
|
|
|
|
|
|
|
return True, out_file_path |
|
|
|
except Exception as e: |
|
|
@@ -650,8 +649,8 @@ def new_merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
for old_sub_system in old_system['Subsystems']: |
|
|
|
if old_sub_system['sub_systems'] == subsystem['sub_systems']: |
|
|
|
for old_pd in old_sub_system['Procedure_details']: |
|
|
|
proc_det_name.add( |
|
|
|
old_pd['kilometer_name']) |
|
|
|
# proc_det_name.add( |
|
|
|
# old_pd['kilometer_name']) |
|
|
|
if old_pd['procedure_name'] not in proc_det_added and \ |
|
|
|
old_pd['procedure_name'] in proc_det_name: |
|
|
|
sub_system_list.append( |
|
|
@@ -719,7 +718,7 @@ def new_merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
"active_status": system['active_status'], |
|
|
|
"Assets": system_asset_list, |
|
|
|
"Subsystems": sub_system_list, |
|
|
|
"Config Kilometer": conf_km_list, |
|
|
|
# "Config Kilometer": conf_km_list, |
|
|
|
}) |
|
|
|
system_added.add(system['system_name']) |
|
|
|
except: |
|
|
@@ -756,9 +755,9 @@ def new_merge_json_files(old_json_path, new_json_path, out_file_path): |
|
|
|
if variant_old['Variant']['name'] in variant_name and variant_old['Variant']['name'] not in variant_added: |
|
|
|
data_consolidated['JSON'].append({'Variant': variant_old['Variant'], |
|
|
|
}) |
|
|
|
json_object = json.dumps(data_consolidated) |
|
|
|
# json_object = json.dumps(data_consolidated, indent=4) |
|
|
|
with open(out_file_path, "w") as outfile: |
|
|
|
outfile.write(json_object) |
|
|
|
outfile.write(json.dumps(data_consolidated, indent=4)) |
|
|
|
|
|
|
|
return True, out_file_path |
|
|
|
except Exception as e: |
|
|
|