|
@@ -251,15 +251,15 @@ def print(sts):

 @frappe.whitelist(allow_guest=True)
 def new_publish(args, publish_type, vehicle, language, version):
-    rate_res = custom_rate_limit(limit=5, seconds=15)
-    if rate_res != 1:
-        return rate_res
+    # rate_res = custom_rate_limit(limit=5, seconds=15)
+    # if rate_res != 1:
+    # return rate_res
     val = input_validation(args=args, publish_type=publish_type,
                            vehicle=vehicle, language=language, version=version)
     if val != '':
         return {'status': 0, 'error': "Parameter missing :"+val}
     # Create Directory for internal,global and full_update publish
-    list1 = []
+    # list1 = []
     try:
         try:
             if not os.path.exists(internal_path + vehicle + '/'):
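# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: the hunk above only comments out
# the rate limiter, so the contract it implies is worth spelling out --
# custom_rate_limit() is assumed to return 1 while the caller is under the
# limit (anything else is passed straight back to the client), and
# input_validation() is assumed to return '' when every parameter is present,
# otherwise the names of the missing ones (hence the "Parameter missing :" +
# val response). A hypothetical stand-in for that validation helper:
def input_validation(**kwargs):
    # hypothetical re-implementation, for illustration only
    missing = [name for name, value in kwargs.items() if value in (None, '')]
    return ','.join(missing)
# ---------------------------------------------------------------------------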
|
@@ -274,20 +274,20 @@ def new_publish(args, publish_type, vehicle, language, version):

         # JSON generation
         json_file = json_grouping(args, language)
-        file_name = '%s-%s_v%s.json' % (vehicle, language, version)
+        file_name = ''
+        base_file_name = '%s-%s_v%s.json' % (vehicle, language, version)
         # File name generation
         if publish_type.lower() == 'internal':
-            file_name = internal_path + vehicle + '/' + file_name
+            file_name = internal_path + vehicle + '/' + base_file_name
         elif publish_type.lower() == 'global':
-            file_name = global_path + vehicle + '/' + file_name
+            file_name = global_path + vehicle + '/' + base_file_name
         else:
             return {"status": 0, 'error': 'Publish type not available'}

         # Save file (Internal/Global)
         with open(file_name, 'w') as outfile:
             outfile.write(json.dumps(json_file))
-        get_step_total_count(file_name)
+        # get_step_total_count(file_name)

         # Remove previous internal publish file
         if publish_type.lower() == 'internal':
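# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: with the rename above, the bare
# JSON file name and the full target path are now kept separate. Hypothetical
# values only (the real internal_path / global_path are defined elsewhere in
# this module):
vehicle, language, version = 'vehicleA', 'en', '2.00'
internal_path = '/home/frappe/sites/assets/public/files/internal/'  # assumed layout
base_file_name = '%s-%s_v%s.json' % (vehicle, language, version)
# -> 'vehicleA-en_v2.00.json'
file_name = internal_path + vehicle + '/' + base_file_name
# -> '/home/frappe/sites/assets/public/files/internal/vehicleA/vehicleA-en_v2.00.json'
# ---------------------------------------------------------------------------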
|
@@ -301,15 +301,14 @@ def new_publish(args, publish_type, vehicle, language, version):

         pub_ver = None
         prev_update_ver = None
         if publish_type.lower() == 'global':
             full_update_file_name = full_update_path + vehicle + '/' + \
                 '%s-%s-full_v%s.json' % (vehicle, language, version)

             # Since full update is not available for Global version 1
             if version == '1.00':
                 with open(full_update_file_name, 'w') as outfile:
                     outfile.write(json.dumps(json_file))
-                get_step_total_count(full_update_file_name)
+                # get_step_total_count(full_update_file_name)
             else:
                 pub_ver = frappe.db.sql(''' SELECT vehicle,`language`,version,publish_type FROM tabPublish where vehicle = '{}'
|
@@ -319,16 +318,17 @@ def new_publish(args, publish_type, vehicle, language, version):

                 prev_full_update_file = full_update_path + vehicle + '/' + \
                     '%s-%s-full_v%s.json' % (vehicle,
                                              language, prev_update_ver)
-                list1.append(prev_full_update_file)
-                list1.append(file_name)
-                list1.append(full_update_file_name)
-                final_update_file = merge_json_files(
+                file_flag, final_update_file = merge_json_files(
                     prev_full_update_file, file_name, full_update_file_name)
-                get_step_total_count(final_update_file)
+                if file_flag:
+                    return True, file_name.split('public')[1]
+                # get_step_total_count(final_update_file)

-        return file_name.split('public')[1]
-    except:
-        return "Failed to save file"
+        return True, file_name.split('public')[1]
+    except Exception as e:
+        # return "Failed to save file"
+        return False, str(frappe.get_traceback())


 @frappe.whitelist(allow_guest=True)
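# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: after this hunk new_publish
# answers with a (flag, payload) pair -- (True, file_name.split('public')[1])
# on success and (False, traceback text) on failure -- while the early
# validation guard still returns a plain dict, so a caller has to handle both
# shapes. Hypothetical caller, assuming the publish directory sits under a
# .../public/... web root so the returned payload is the servable sub-path:
import frappe

result = new_publish(args='[]', publish_type='internal',
                     vehicle='vehicleA', language='en', version='2.00')
if isinstance(result, dict):
    frappe.throw(result.get('error'))      # "Parameter missing :..." guard
else:
    ok, payload = result
    if ok:
        download_path = payload            # e.g. '/files/internal/vehicleA/...'
    else:
        frappe.throw(payload)              # payload carries str(frappe.get_traceback())
# ---------------------------------------------------------------------------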
|
@@ -381,31 +381,29 @@ def new_update(vehicle_list=None):


 @frappe.whitelist(allow_guest=True)
 def merge_json_files(old_json_path, new_json_path, out_file_path):
-    rate_res = custom_rate_limit(limit=5, seconds=15)
-    if rate_res != 1:
-        return rate_res
-    val = input_validation(old_json_path=old_json_path,
-                           new_json_path=new_json_path, out_file_path=out_file_path)
-    if val != '':
-        return {'status': 0, 'error': "Parameter missing :"+val}
+    # rate_res = custom_rate_limit(limit=5, seconds=15)
+    # if rate_res != 1:
+    # return rate_res
+    # val = input_validation(old_json_path=old_json_path,
+    # new_json_path=new_json_path, out_file_path=out_file_path)
+    # if val != '':
+    # return {'status': 0, 'error': "Parameter missing :" + val}

     try:
-        with open(old_json_path) as json_file:
+        with open(old_json_path, 'r') as json_file:
             data_old = json.load(json_file)
-        with open(new_json_path) as json_file:
-            data_new = json.load(json_file)
+        with open(new_json_path, 'r') as json_file1:
+            data_new = json.load(json_file1)

         data_consolidated = {'JSON': []}

-        # print('Collecting Variants...')
         variant_name = set()
         sub_system_name = set()
-        procedure_name = set()
-        step_name = set()
         variant_added = set()
         for variant in data_old['JSON']:
             variant_name.add(variant['Variant']['name'])
         for variant in data_new['JSON']:
             variant_name.add(variant['Variant']['name'])
-        # print('Comparing Variants...')

         for variant_new in data_new['JSON']:
             if variant_new['Variant']['name'] in variant_name and variant_new['Variant']['name'] not in variant_added:
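# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: the merge below repeats a single
# pattern at every level (variants, assets, systems, subsystems, config
# kilometres, procedures) -- collect the key of every old and new record into
# a *_name set, walk the new records first, and use a *_added set so each key
# is emitted only once, which lets the new file win whenever both files carry
# the same key. A condensed, assumption-level version of that pattern:
def merge_by_key(old_items, new_items, key):
    seen_names = {item[key] for item in old_items} | {item[key] for item in new_items}
    merged, added = [], set()
    for item in list(new_items) + list(old_items):   # new entries take priority
        if item[key] in seen_names and item[key] not in added:
            merged.append(item)
            added.add(item[key])
    return merged
# The real code applies this keyed on variant['Variant']['name'],
# variant_asset['file'], asset['system_asset'], old_sub_system['sub_systems'],
# ck['kilometer_name'] and pd['procedure_name'], rebuilding each nested level.
# ---------------------------------------------------------------------------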
|
@@ -415,19 +413,19 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                 variant_asset_name = set()
                 variant_asset_added = set()
                 variant_asset_list = []
-                # print(variant_new['Variant'])
                 try:
-                    variant_new['Variant']['Assets']
+                    # variant_new['Variant']['Assets']
                     for variant_asset in variant_new['Variant']['Assets']:
                         variant_asset_name.add(variant_asset['file'])
-                        if variant_asset['file'] not in variant_asset_added and variant_asset['file'] in variant_asset_name:
+                        if variant_asset['file'] not in variant_asset_added and variant_asset[
+                                'file'] in variant_asset_name:
                             variant_asset_list.append(variant_asset)
                             variant_asset_added.add(variant_asset['file'])
                 except:
                     pass
                     # print('Assets not present in '+variant_new['Variant']['name'])
                 try:
-                    variant_new['Variant']['Systems']
+                    # variant_new['Variant']['Systems']
                     for system in variant_new['Variant']['Systems']:
                         system_asset_name = set()
                         system_asset_added = set()
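# ---------------------------------------------------------------------------
# Illustrative note, not part of the diff: the bare subscripts commented out in
# this hunk (variant_new['Variant']['Assets'] / ['Systems']) only served to
# raise KeyError early when a variant has no such block; the for-loops that
# follow perform the same lookup, so the surrounding try/except still skips
# those variants. An equivalent explicit guard, sketched on a stand-in record:
variant = {'Variant': {'name': 'vehicleA 1.2 MT', 'Assets': []}}   # hypothetical shape
if 'Assets' in variant['Variant']:
    for variant_asset in variant['Variant']['Assets']:
        pass   # same per-file de-duplication as in the hunk above
# ---------------------------------------------------------------------------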
|
@@ -447,10 +445,10 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                                 conf_km_name.add(ck['kilometer_name'])
                                 if ck['kilometer_name'] not in conf_km_added and ck['kilometer_name'] in conf_km_name:
                                     conf_km_list.append({
                                         "kilometer_name": ck['kilometer_name'],
                                         "kilometer_IDX": ck['kilometer_IDX'],
                                         "kilometers_applicable": ck['kilometers_applicable']
                                     })
                                     conf_km_added.add(
                                         ck['kilometer_name'])
                             for old_variants in data_old['JSON']:
|
@@ -462,7 +460,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                                         for old_ck in old_sub_system['Config Kilometer']:
                                             conf_km_name.add(
                                                 old_ck['kilometer_name'])
-                                            if old_ck['kilometer_name'] not in conf_km_added and old_ck['kilometer_name'] in conf_km_name:
+                                            if old_ck['kilometer_name'] not in conf_km_added and \
+                                                    old_ck['kilometer_name'] in conf_km_name:
                                                 sub_system_list.append(
                                                     old_ck)
                                                 conf_km_added.add(
|
@@ -472,7 +471,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                                 proc_det_list = []
                                 for pd in subsystem['Procedure_details']:
                                     proc_det_name.add(pd['procedure_name'])
-                                    if pd['procedure_name'] not in proc_det_added and pd['procedure_name'] in proc_det_name:
+                                    if pd['procedure_name'] not in proc_det_added and pd[
+                                            'procedure_name'] in proc_det_name:
                                         proc_det_list.append({
                                             "procedure_name": pd['procedure_name'],
                                             "steps": pd['steps'],
|
@@ -488,7 +488,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                                         for old_pd in old_sub_system['Procedure_details']:
                                             proc_det_name.add(
                                                 old_pd['kilometer_name'])
-                                            if old_pd['procedure_name'] not in proc_det_added and old_pd['procedure_name'] in proc_det_name:
+                                            if old_pd['procedure_name'] not in proc_det_added and \
+                                                    old_pd['procedure_name'] in proc_det_name:
                                                 sub_system_list.append(
                                                     old_pd)
                                                 proc_det_added.add(
|
@@ -518,14 +519,16 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                                 for old_sub_system in old_system['Subsystems']:
                                     sub_system_name.add(
                                         old_sub_system['sub_systems'])
-                                    if old_sub_system['sub_systems'] not in sub_system_added and old_sub_system['sub_systems'] in sub_system_name:
+                                    if old_sub_system['sub_systems'] not in sub_system_added and \
+                                            old_sub_system['sub_systems'] in sub_system_name:
                                         sub_system_list.append(
                                             old_sub_system)
                                         sub_system_added.add(
                                             old_sub_system['sub_systems'])
                         for asset in system['Assets']:
                             system_asset_name.add(asset['system_asset'])
-                            if asset['system_asset'] not in system_asset_added and asset['system_asset'] in system_asset_name:
+                            if asset['system_asset'] not in system_asset_added and asset[
+                                    'system_asset'] in system_asset_name:
                                 system_asset_list.append({"system_asset": asset['system_asset'],
                                                           "systemdisplayorder": asset['systemdisplayorder'],
                                                           "active_status": asset['active_status'],
|
@@ -539,7 +542,8 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                             for old_system_asset in old_system['Assets']:
                                 system_asset_name.add(
                                     old_system_asset['system_asset'])
-                                if old_system_asset['system_asset'] not in system_asset_added and old_system_asset['system_asset'] in system_asset_name:
+                                if old_system_asset['system_asset'] not in system_asset_added and \
+                                        old_system_asset['system_asset'] in system_asset_name:
                                     system_asset_list.append(
                                         old_system_asset)
                                     system_asset_added.add(
|
@@ -573,12 +577,14 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

                     "variant": variant_new['Variant']['variant'],
                     "vehicle": variant_new['Variant']['vehicle'],
                     "family_code": variant_new['Variant']['family_code'],
-                    "vehicle_segment": variant_new['Variant']['vehicle_segment'],
+                    "vehicle_segment": variant_new['Variant'][
+                        'vehicle_segment'],
                     "fuel": variant_new['Variant']['fuel'],
                     "transmission": variant_new['Variant']['transmission'],
                     "drive": variant_new['Variant']['drive'],
                     "active_status": variant_new['Variant']['active_status'],
-                    "kms_mapping_active_status": variant_new['Variant']['kms_mapping_active_status'],
+                    "kms_mapping_active_status": variant_new['Variant'][
+                        'kms_mapping_active_status'],
                     "Assets": variant_asset_list,
                     "Systems": system_list
                 }})
|
@@ -590,9 +596,10 @@ def merge_json_files(old_json_path, new_json_path, out_file_path):

         json_object = json.dumps(data_consolidated)
         with open(out_file_path, "w") as outfile:
             outfile.write(json_object)
-        return out_file_path
+        return True, out_file_path
     except Exception as e:
-        return "Exception in publish"
+        return False, frappe.get_traceback()
+        # return "Exception in publish"


 def get_step_total_count(json_path):
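# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: with the return-shape change in
# the last hunk, merge_json_files reports success explicitly, which is what the
# earlier new_publish hunk unpacks as (file_flag, final_update_file).
# Hypothetical paths only:
import frappe

ok, result = merge_json_files(
    '/tmp/vehicleA-en-full_v1.00.json',     # previous full-update file
    '/tmp/vehicleA-en_v2.00.json',          # newly published delta file
    '/tmp/vehicleA-en-full_v2.00.json')     # merged output path
if not ok:
    frappe.throw(result)   # result now carries frappe.get_traceback(), not "Exception in publish"
# ---------------------------------------------------------------------------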
|
|