@ -136,7 +136,7 @@ class Publish(Document):
update_publish_status = frappe . db . sql (
""" update `tabPublish` set publish_status= ' Published ' where name= ' {0} ' """ . format ( self . name ) )
if self . docstatus == 1 and self . publish_module == ' Feature Finder ' and self . publish_type == ' Global ' :
if self . docstatus == 1 and self . publish_module == ' Torque Manual ' and self . publish_type == ' Global ' :
torque_manual_publish ( self . vehicle , self . vehicle_id ,
self . language , self . publish_type ,
self . release_description , self . torque_manual_publish_docs ,
@ -144,6 +144,23 @@ class Publish(Document):
update_torque_manual_published_docs ( self )
update_publish_status = frappe . db . sql (
""" update `tabPublish` set publish_status= ' Published ' where name= ' {0} ' """ . format ( self . name ) )
# training information
if self . docstatus == 1 and self . publish_status == ' To Publish ' and self . publish_module == ' Training Information ' and self . publish_type == ' Internal ' :
training_information_publish ( self . vehicle , self . vehicle_id ,
self . language , self . publish_type ,
self . release_description , self . variant ,
self . training_information_publish_docs )
update_publish_status = frappe . db . sql (
""" update `tabPublish` set publish_status= ' Published ' where name= ' {0} ' """ . format ( self . name ) )
if self . docstatus == 1 and self . publish_status == ' To Publish ' and self . publish_module == ' Training Information ' and self . publish_type == ' Global ' and self . training_information_publish_docs :
training_information_publish ( self . vehicle , self . vehicle_id ,
self . language , self . publish_type ,
self . release_description , self . variant ,
self . training_information_publish_docs )
update_training_published_docs ( self )
update_publish_status = frappe . db . sql (
""" update `tabPublish` set publish_status= ' Published ' where name= ' {0} ' """ . format ( self . name ) )
variant = self . variant_mapping_details . split ( ' /n ' )
update_publish_mapping ( self . vehicle , self . variant_mapping ,
@ -257,6 +274,21 @@ def update_publish_mapping(vehicle, variant, language, module, publish_type):
f """ UPDATE `tabModule Publish Mapping` set repairservice_check_sheet= ' 1 ' ,publish_type= ' { publish_type } ' where name = ' { d [ ' name ' ] } ' """ )
frappe . db . commit ( )
elif module == ' Training Information ' :
pub_data = frappe . db . get_list ( ' Module Publish Mapping ' , filters = {
" vehicle " : vehicle ,
" language " : language ,
" publish_type " : publish_type
} , fields = [ ' name ' ] )
if len ( pub_data ) > 0 :
for d in pub_data :
if d [ ' name ' ] :
frappe . db . sql (
f """ UPDATE `tabModule Publish Mapping` set training_information= ' 1 ' ,publish_type= ' { publish_type } ' where name = ' { d [ ' name ' ] } ' """ )
frappe . db . commit ( )
elif module == ' Special Tool ' :
pub_data = frappe . db . get_list ( ' Module Publish Mapping ' , filters = {
@ -461,7 +493,18 @@ def generate_global_publish(name, module_name):
" publish_module " : res . publish_module ,
" torque_manual_publish_docs " : res . torque_manual_publish_docs
} )
ret . save ( )
elif module_name == ' Training Information ' :
ret = frappe . get_doc ( {
" doctype " : " Publish " ,
" vehicle " : res . vehicle ,
" publish_type " : " Global " ,
" language " : res . language ,
" publish_status " : ' To Publish ' ,
" release_description " : res . release_description ,
" publish_module " : res . publish_module ,
" training_information_publish_docs " : res . training_information_publish_docs
} )
ret . save ( )
return { " status " : " success " , " message " : ret . name , " url " : frappe . utils . get_url ( ) }
except Exception as e :
frappe . throw ( str ( e ) )
@ -662,6 +705,15 @@ def update_special_tool_publish_docs(self):
except Exception as e :
frappe . throw ( str ( e ) )
def update_training_published_docs(self):
    """Mark every Training Information doc attached to this Publish as published.

    Iterates the ``training_information_publish`` child rows and flips the
    ``published`` flag on each referenced ``Training Information`` document.
    Re-raises any failure to the UI via ``frappe.throw``.
    """
    try:
        for d in self.training_information_publish:
            # Parameterized query: doc names may contain quotes, and string
            # interpolation here was SQL-injection prone.
            frappe.db.sql(
                """UPDATE `tabTraining Information` set published=1 where name=%s""",
                (d.parent1,))
            frappe.db.commit()
    except Exception as e:
        frappe.throw(str(e))
def create_publish_folders ( folder_url ) :
try :
@ -684,17 +736,6 @@ def create_publish_folders(folder_url):
''' New Module Publish Section '''
@frappe . whitelist ( )
def get_service_repair ( vehicle ,
language_label , publish_type ) :
data = frappe . db . sql ( f '''
select * from ` tabRepair Service Mapping ` where vehicle = ' {vehicle} ' and language = ' {language_label} ' and published = 0 ;
''' , as_dict=1)
return data
def create_df ( data_set ) :
feature_finder_tmp = json . dumps ( data_set )
df = pd . DataFrame ( json . loads ( feature_finder_tmp ) )
@ -708,17 +749,6 @@ def create_df(data_set):
res1 [ key ] = value . to_dict ( ' records ' )
return res1
@frappe . whitelist ( )
def get_feature_finder ( vehicle = None , variant = None , language_label = None ) :
try :
data = frappe . db . sql (
f """ select * from `tabFeature Finder` where vehicle= ' { vehicle } ' and variant_id= ' { variant } ' and language= ' { language_label } ' and is_published=0 """ , as_dict = 1 )
return data
except Exception as e :
return str ( e )
def repair_checksheet_publish ( vehicle , vehicle_id ,
language , publish_type , release_description ,
variant , parent ) :
@ -1351,7 +1381,7 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
new_variant_name = [
x for x in new_variant if x not in find_distinct ]
vehi_data = add_new_val (
vehi_data , new_variant_name , feature_finder _tmp)
vehi_data , new_variant_name , torque_manual _tmp)
elif os . path . isfile ( file_path ) and publish_type == ' Global ' :
frappe . log_error ( ' inside ' )
@ -1368,7 +1398,7 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
publish_type , i . parent1 , i . variant )
if torque_manual [ ' status ' ] == 1 :
publish_torque_manual = 1
feature_finder _tmp. append (
torque_manual _tmp. append (
torque_manual [ ' data ' ] [ 0 ] )
torque_manual_tmp = create_df ( torque_manual_tmp )
latest_pub_data [ ' data ' ] = torque_manual_tmp
@ -1463,6 +1493,309 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
f ' { vehicle } - { language } - { publish_type } - { module_name } error in json creation ' + str ( e ) )
frappe . throw ( ' Failed To Publish ' )
# def training_information_publish(vehicle=None, vehicle_id=None,
# language=None, publish_type=None,
# release_description=None,variant=None,parent=None):
# try:
# logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}'
# logger = frappe.logger(logger_file,
# allow_site=True, file_count=100)
# logger.info(
# f"start of fetching Training Information data - {vehicle} - {language} - {module_name} - {variant}")
# folder_url = base_url+"/"+vehicle.replace(' ', '-') + "/"+language
# logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}')
# '''Final Json'''
# vehicle_data = {
# 'vehicle': vehicle,
# 'vehicle_myid': vehicle_id,
# 'publish_type': publish_type,
# 'publish_description': release_description,
# 'publish_language': language,
# 'data': ''
# }
# latest_pub_data = vehicle_data.copy()
# '''Publish Ready Flags'''
# publish_training_information = 0
# training_information_tmp = []
# '''Create Folder For Publish'''
# create_publish_folders(folder_url)
# full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
# file_path = folder_url + "/" + publish_type + "/" + \
# vehicle.replace(' ', '-') + '-training_information' + '.json'
# global_file_path = folder_url + "/" + 'Global' + "/" + \
# vehicle.replace(' ', '-') + '-training_information' + '.json'
# full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
# # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
# global_full_update_path = folder_url + "/" + "Global_Full_Update"
# check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
# '''Append Published Data to Json'''
# logger.info(
# f'Training Data Append Start::{vehicle}-{language}-{publish_type}')
# '''update existing global json file'''
# if os.path.isfile(global_file_path) and publish_type == 'Internal':
# if os.path.isfile(check_glbl_full_updte):
# with open(check_glbl_full_updte) as f:
# published_data = json.load(f)
# for i in parent:
# training_information = training_information_data(vehicle, language,
# publish_type, i.parent1)
# if training_information['status'] == 1:
# publish_training_information = 1
# training_information_tmp.append(
# training_information['data'][0])
# training_information_tmp = create_df(training_information_tmp)
# latest_pub_data['data'] = training_information_tmp
# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
# 'data': training_information_tmp})
# if vehi_data:
# find_distinct = set(exisitng_var)
# new_variant_name = [
# x for x in new_variant if x not in find_distinct]
# vehi_data = add_new_val(
# vehi_data, new_variant_name,training_information_tmp)
# elif os.path.isfile(file_path) and publish_type == 'Global':
# frappe.log_error('inside')
# full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
# #Remove Internal Full Update File On Global Publish
# existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
# frappe.log_error('file_path' + str(existing_internal_full_update))
# if os.path.isfile(existing_internal_full_update):
# os.remove(existing_internal_full_update)
# with open(check_glbl_full_updte) as f:
# published_data = json.load(f)
# for i in parent:
# training_information = training_information_data(vehicle, language,
# publish_type, i.parent1)
# if training_information['status'] == 1:
# publish_training_information = 1
# training_information_tmp.append(
# training_information['data'][0])
# # training_information_tmp = create_df(training_information_tmp)
# latest_pub_data['data'] = training_information_tmp
# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
# 'data': training_information_tmp})
# if vehi_data:
# find_distinct = set(exisitng_var)
# new_variant_name = [
# x for x in new_variant if x not in find_distinct]
# vehi_data = add_new_val(
# vehi_data,new_variant_name, training_information_tmp)
# else:
# #Existing Global or Internal Full Update
# internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
# check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
# if os.path.isfile(check_full_update_file):
# with open(check_full_update_file) as f:
# published_data = json.load(f)
# for i in parent:
# frappe.log_error("i",str(parent))
# training_information = training_information_data(
# vehicle, language, publish_type, i.parent1)
# if training_information['status'] == 1:
# publish_training_information = 1
# if len(training_information['data']):
# training_information_tmp.append(
# training_information['data'][0])
# else:
# frappe.throw('failed to publish')
# # training_information_tmp = create_df(training_information_tmp)
# latest_pub_data['data'] = training_information_tmp
# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
# 'data': training_information_tmp})
# if vehi_data:
# find_distinct = set(exisitng_var)
# new_variant_name = [
# x for x in new_variant if x not in find_distinct]
# vehi_data = add_new_val(
# vehi_data, new_variant_name,training_information_tmp)
# vehicle_data['data'] = vehi_data
# #First Time Global Or Internal
# else:
# for i in parent:
# training_information = training_information_data(
# vehicle, language, publish_type, i.parent1)
# if training_information['status'] == 1:
# publish_training_information = 1
# if len(training_information['data'])>0:
# training_information_tmp.append(
# training_information['data'][0])
# else:
# frappe.throw('failed to publish')
# training_information_tmp = get_latest_data(
# {'data': training_information_tmp}, {'data': []})
# frappe.log_error(str(training_information_tmp))
# # vehi_data = create_df(training_information_tmp)
# latest_pub_data['data'] = training_information_tmp
# if publish_type == "Global":
# full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
# #Remove Internal Full Update File On Global Publish
# existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
# frappe.log_error('file_path' + str(existing_internal_full_update))
# if os.path.isfile(existing_internal_full_update):
# os.remove(existing_internal_full_update)
# """ Save publish file """
# vehicle_data['data'] = training_information_tmp
# with open(file_path, 'w') as outfile:
# outfile.write(json.dumps(latest_pub_data, indent=4, default=str))
# with open(full_update_file, 'w') as outfile:
# outfile.write(json.dumps(vehicle_data, indent=4, default=str))
# logger.info(
# f'Training Information Data Append Start::{vehicle}-{language}-{publish_type}')
# return 1, file_path
# except Exception as e:
# logger.info(
# f'{vehicle} - {language} - {publish_type} - {module_name} error in json creation' + str(e))
# frappe.throw('Failed To Publish')
def training_information_publish(vehicle, vehicle_id,
                                 language, publish_type, release_description,
                                 variant, parent):
    """Publish Training Information data for a vehicle/language as a JSON file.

    Fetches the docs referenced by ``parent`` (rows carrying a ``parent1``
    name), merges them with any previously published Global/Internal JSON via
    ``get_latest_data``, and writes the result under
    ``<base_url>/<vehicle>/<language>/<publish_type>/``.

    Returns ``(1, file_path)`` when at least one doc was publish-ready;
    implicitly returns ``None`` otherwise. Errors are logged and surfaced
    through ``frappe.throw('Failed To Publish')``.
    """
    try:
        logger_file = f'{vehicle} - {language} - {publish_type} - training information'
        logger = frappe.logger(logger_file, allow_site=True, file_count=100)
        logger.info(
            f'Training Information Started-{vehicle}-{language}-{publish_type}-{parent}')
        folder_url = base_url + "/" + vehicle.replace(' ', '-') + "/" + language
        logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}')

        publish_training_information = 0  # publish-ready flag
        training_information_tmp = []

        create_publish_folders(folder_url)
        json_name = vehicle.replace(' ', '-') + '-training_information' + '.json'
        file_path = folder_url + "/" + publish_type + "/" + json_name
        global_file_path = folder_url + "/" + 'Global' + "/" + json_name
        check_inter_file = folder_url + "/" + 'Internal' + "/" + json_name

        logger.info(
            f'Training Information Data Append Start::{vehicle}-{language}-{publish_type}')
        vehicle_data = {
            'vehicle': vehicle,
            'vehicle_myid': vehicle_id,
            'publish_type': publish_type,
            'publish_description': release_description,
            'publish_language': language,
            'data': ''
        }

        def _collect(bucket, skip_empty=False):
            # Fetch every referenced doc into `bucket`; return 1 when at least
            # one fetch succeeded (the original code set the publish-ready flag
            # inside each of three duplicated loops — this helper replaces them).
            ready = 0
            for row in parent:
                fetched = training_information_data(
                    vehicle, language, publish_type, row.parent1)
                if fetched['status'] == 1:
                    ready = 1
                    # The first-time-publish branch historically skipped empty
                    # result sets; the merge branches appended unconditionally.
                    if not skip_empty or len(fetched['data']):
                        bucket.append(fetched['data'][0])
            return ready

        if os.path.isfile(global_file_path) and publish_type == 'Internal':
            # Internal publish on top of an existing Global file:
            # merge the fresh rows into the previously published Global data.
            with open(global_file_path) as f:
                published_data = json.load(f)
            if _collect(training_information_tmp):
                publish_training_information = 1
            training_information_tmp = get_latest_data(
                {'data': published_data['data']},
                {'data': training_information_tmp})
        elif os.path.isfile(global_file_path) and publish_type == 'Global':
            with open(global_file_path) as f:
                published_data = json.load(f)
            if _collect(training_information_tmp):
                publish_training_information = 1
            training_information_tmp = get_latest_data(
                {'data': published_data['data']},
                {'data': training_information_tmp})
            if os.path.isfile(check_inter_file):
                # An Internal file exists as well: re-fetch and merge against
                # the Internal data instead, overriding the Global merge above
                # (preserves the original control flow).
                frappe.log_error(str('internal aleady exists'))
                with open(check_inter_file) as f:
                    published_data = json.load(f)
                training_information_tmp1 = []
                if _collect(training_information_tmp1):
                    publish_training_information = 1
                frappe.log_error('before' + str(training_information_tmp1))
                training_information_tmp = get_latest_data(
                    {'data': published_data['data']},
                    {'data': training_information_tmp1})
        else:
            # First-time publish: no Global file exists yet.
            frappe.log_error(str('new internal publish'))
            if _collect(training_information_tmp, skip_empty=True):
                publish_training_information = 1
            training_information_tmp = get_latest_data(
                {'data': training_information_tmp}, {'data': []})
            frappe.log_error('new pub data' + str(training_information_tmp))

        if publish_type == "Global":
            # A Global publish supersedes any existing Internal file.
            if os.path.isfile(check_inter_file):
                os.remove(check_inter_file)

        if publish_training_information == 1:
            # Save the publish file.
            vehicle_data['data'] = training_information_tmp
            frappe.log_error(str(file_path))
            with open(file_path, 'w') as outfile:
                outfile.write(json.dumps(vehicle_data, indent=4, default=str))
            return 1, file_path
    except Exception as e:
        logger.info(
            f'{vehicle} - {language} - {publish_type} error in json creation' + str(e))
        frappe.throw('Failed To Publish')
def get_key_value_data ( data ) :
module_dic = { }
for d in data [ ' data ' ] :
@ -1802,11 +2135,11 @@ def torque_manual_data(vehicle=None, language=None,
allow_site = True , file_count = 100 )
logger . info (
f " start of fetching torque manual data - { vehicle } - { language } - { variant } - { module_name } " )
torque_manual_details = frappe . db . sql ( ''' select name,variant,vehicle,language,active_status,category,sub_category,display_order,my_id
from ` tabTorque Manual ` where variant = ' %s ' & & language = ' %s ' ; ''' % (variant,language), as_dict=1)
torque_manual_details = frappe . db . sql ( ''' select name,variant,vehicle,language,active_status,category,sub_category,display_order,keywords, my_id
from ` tabTorque Manual ` where variant = ' %s ' & & language = ' %s ' ; ''' % (variant,language), as_dict=1)
for t in torque_manual_details :
t [ ' torque_table ' ] = frappe . db . sql ( ''' select idx as ' display_order ' ,fastener_description,specification,qty,torque_nm,remark from
` tabTorque Manual Details ` where parent = ' %s ' order by display_order ; ''' % (t[ ' name ' ]),as_dict=1)
` tabTorque Manual Details ` where parent = ' %s ' order by display_order ; ''' % (t[' name ' ]),as_dict=1)
logger . info (
f " end of Torque Manual data { variant } - { language } " )
@ -1820,10 +2153,43 @@ def torque_manual_data(vehicle=None, language=None,
e )
logger . error ( ' error in Torque Manual ' + str ( e ) )
return failure_reponse
def training_information_data(vehicle=None, language=None,
                              publish_type=None, parent=None, variant=None):
    """Fetch one Training Information doc (named ``parent``) and its content rows.

    Returns the module-level ``success_reponse`` dict with ``data`` holding the
    matching `tabTraining Information` rows (each augmented with a ``Content``
    list from `tabTraining Information Content Reference`), or the module-level
    ``failure_reponse`` dict with ``error`` set on exception.
    NOTE(review): both response dicts are shared module globals mutated in
    place — concurrent callers may race; confirm against the rest of the module.
    """
    try:
        logger_file = f'{vehicle} - {language} - {publish_type} - training_information_data'
        logger = frappe.logger(logger_file,
                               allow_site=True, file_count=100)
        logger.info(
            f"start of fetching training information data - {vehicle} - {language} - {publish_type} - {parent}")
        # Parameterized queries: the previous f-string interpolation broke on
        # names containing quotes and was SQL-injection prone.
        training_information_details = frappe.db.sql(
            """select name,vehicle,category,language,display_order,keywords,my_id,active_status
               from `tabTraining Information`
               where vehicle=%s and language=%s and name=%s""",
            (vehicle, language, parent), as_dict=1)
        for d in training_information_details:
            d['Content'] = frappe.db.sql(
                """select idx as 'display_order',topic,content
                   from `tabTraining Information Content Reference`
                   where parent=%s order by display_order;""",
                (d['name'],), as_dict=1)
        logger.info(
            f"end of fetching training informataion data {vehicle} - {language}")
        success_reponse['data'] = training_information_details
        success_reponse['message'] = f'Training Information Fecthed Succesfully for {vehicle} - {language} - training_information_data'
        return success_reponse
    except Exception as e:
        failure_reponse['error'] = f"{vehicle} - {language} has following error - " + str(
            e)
        logger.error('error in Training Information' + str(e))
        return failure_reponse
@frappe . whitelist ( )
def max_publish_new_module ( doc ) :
doc = frappe . get_doc ( " Publish " , doc )
ver = frappe . db . sql ( """ select max(version) from {0} .tabPublish where vehicle = " {1} " and language = " {2} " and publish_module!= " Automotive System " ; """ . format (
ver = frappe . db . sql ( """ select max(version) from {0} .` tabPublish` where vehicle = " {1} " and language = " {2} " and publish_module!= " Automotive System " ; """ . format (
current_db_name , doc . vehicle , doc . language ) )
if ver :
@ -1831,7 +2197,7 @@ def max_publish_new_module(doc):
@frappe . whitelist ( )
def cal_ver_new_module ( vehicle , lang , publish_type , doc ) :
def cal_ver_new_module ( vehicle , lang , publish_type , doc = None ) :
try :
current_db_name = frappe . conf . get ( " db_name " )
doc = frappe . get_doc ( " Publish " , doc )
@ -1859,9 +2225,33 @@ def cal_ver_new_module(vehicle, lang, publish_type, doc):
except Exception as e :
return { " status " : 0 , " data " : " None " , " error " : str ( e ) }
@frappe.whitelist()
def get_service_repair(vehicle,
                       language_label, publish_type):
    """Whitelisted API: list unpublished Repair Service Mapping rows.

    Returns all `tabRepair Service Mapping` rows for the given vehicle and
    language with ``published = 0``. ``publish_type`` is accepted for API
    compatibility but not used in the query.
    """
    # Parameterized query: inputs arrive from an HTTP request, so the previous
    # f-string interpolation was an SQL-injection vector.
    data = frappe.db.sql(
        """select * from `tabRepair Service Mapping`
           where vehicle=%s and language=%s and published=0;""",
        (vehicle, language_label), as_dict=1)
    return data
@frappe.whitelist()
def get_feature_finder(vehicle=None, variant=None, language_label=None):
    """Whitelisted API: list unpublished Feature Finder rows.

    Returns all `tabFeature Finder` rows matching the vehicle, variant id and
    language with ``is_published = 0``; on failure returns the error text
    (preserves the original string-on-error contract).
    """
    try:
        # Parameterized query: request-supplied values must not be interpolated
        # into SQL (previous f-string form was injection-prone).
        data = frappe.db.sql(
            """select * from `tabFeature Finder`
               where vehicle=%s and variant_id=%s and language=%s and is_published=0""",
            (vehicle, variant, language_label), as_dict=1)
        return data
    except Exception as e:
        return str(e)
@frappe.whitelist()
def get_training_information(vehicle=None, language_label=None):
    """Whitelisted API: list unpublished Training Information rows.

    Returns all `tabTraining Information` rows for the vehicle/language with
    ``is_published = 0``; on failure returns the error text (preserves the
    original string-on-error contract).
    """
    try:
        # Parameterized query instead of %-string interpolation of request args.
        data = frappe.db.sql(
            """select * from `tabTraining Information`
               where vehicle=%s and language=%s and is_published=%s;""",
            (vehicle, language_label, 0), as_dict=1)
        return data
    except Exception as e:
        return str(e)
@frappe . whitelist ( )
def get_qwik_service ( variant = None , language_label = None ) :
try :
data = frappe . db . sql ( """ select * from `tabQwik Service` where variant= ' %s '
and language = ' %s ' and is_published = ' %s ' ; """ % (variant, language_label, 0), as_dict=1)
@ -1872,7 +2262,7 @@ def get_qwik_service(variant=None, language_label=None, kilometers=None):
@frappe.whitelist()
def get_special_tool(vehicle=None):
    """Whitelisted API: list unpublished Special Tool Information rows.

    Returns all `tabSpecial Tool Information` rows for the vehicle with
    ``published = 0``.
    """
    # Parameterized query instead of %-string interpolation of request args.
    data = frappe.db.sql(
        """select * from `tabSpecial Tool Information`
           where vehicle=%s and published=%s;""",
        (vehicle, 0), as_dict=1)
    return data
@ -1881,8 +2271,8 @@ def get_special_tool(vehicle=None, publish_type=None):
@frappe.whitelist()
def get_torque_manual(variant=None, language_label=None):
    """Whitelisted API: list unpublished Torque Manual rows.

    Returns all `tabTorque Manual` rows for the variant/language with
    ``is_published = 0``; on failure returns the error text (preserves the
    original string-on-error contract).
    """
    try:
        # Parameterized query instead of %-string interpolation of request args.
        data = frappe.db.sql(
            """select * from `tabTorque Manual`
               where variant=%s and language=%s and is_published=%s;""",
            (variant, language_label, 0), as_dict=1)
        return data
    except Exception as e:
        return str(e)