From bb4600bece5c8a0a27d7e8515d386801fc23a646 Mon Sep 17 00:00:00 2001 From: venkataakhil Date: Thu, 16 Nov 2023 12:09:30 +0530 Subject: [PATCH 1/7] kilometer mapping reload fix --- .../kilometer_mapping/kilometer_mapping.js | 17 +++++++++++------ .../kilometer_mapping/kilometer_mapping.py | 8 +++++--- .../transactions/doctype/publish/publish.py | 4 +--- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js index c4c5a37..f1ffbb3 100644 --- a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js +++ b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js @@ -56,7 +56,10 @@ frappe.ui.form.on('Kilometer Mapping', { }, validate: function (frm) { - + if (frm.doc.__unsaved == 1){ + frm.doc.workflow_state = "Draft"; + } + frm.doc.kilometer_filter = "All"; if (frm.doc.kilometer_filter == "All") { frm.get_field('config_kilometer').grid.static_rows = false; @@ -69,7 +72,7 @@ frappe.ui.form.on('Kilometer Mapping', { }, refresh: function (frm) { - //custom button + //custom duplicate button frm.add_custom_button(__("Duplicate Data"), function () { let d = new frappe.ui.Dialog({ title: 'Choose the variant', @@ -96,16 +99,19 @@ frappe.ui.form.on('Kilometer Mapping', { }, callback: function (r) { if (r.message && r.message['status'] == "success") { + frm.doc.workflow_state = 'Draft'; + frm.set_value('workflow_state','Draft') cur_frm.refresh_fields("config_kilometer"); + cur_frm.refresh_fields("workflow_state"); frm.save() frappe.show_alert({ message: __('Saved'), indicator: 'green' }, 5); - frm.refresh() - - + // frm.refresh() + // frm.refresh_fields("config_kilometer"); + cur_frm.reload_doc(); } } }) @@ -122,7 +128,6 @@ frappe.ui.form.on('Kilometer Mapping', { d.show(); setTimeout(function mysam() { - console.log("jiiii", $('.modal-dialog').find('.custom-actions').html('

Are you sure to update the kilometer mapping?'))
                    $('.modal-dialog').find('.custom-actions').html('Are you sure to update the kilometer mapping?
') } , 300); diff --git a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py index 26ffeef..a5d885c 100644 --- a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py +++ b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py @@ -6,7 +6,7 @@ from frappe.model.document import Document import sys from frappe import _ sys.tracebacklimit=0 - +current_db_name = frappe.conf.get("db_name") class KilometerMapping(Document): def onload(self): ###Kilometer mapping config table idx allignment @@ -15,10 +15,9 @@ class KilometerMapping(Document): for x in self.get("config_kilometer"): idx += 1 x.idx = idx - @frappe.whitelist() def get_system(doc): - current_db_name = frappe.conf.get("db_name") + system_map = frappe.db.sql("""select name from {0}.`tabSystem Mapping` where variant_mapping='{1}'""".format(current_db_name,doc),as_dict=True) if system_map: for s in system_map: @@ -64,7 +63,10 @@ def get_config_kilometer(doc,docname): ress.append("config_kilometer",d) ress.add_comment(text=_("data added"+str(res))) ress.save() + frappe.db.sql(f"""update `tabKilometer Mapping` set workflow_state = 'Draft' where name = '{docname}' + """) frappe.db.commit() + return {"status":"success"} diff --git a/smart_service/transactions/doctype/publish/publish.py b/smart_service/transactions/doctype/publish/publish.py index a986c6c..7461f67 100644 --- a/smart_service/transactions/doctype/publish/publish.py +++ b/smart_service/transactions/doctype/publish/publish.py @@ -215,18 +215,16 @@ def update_qwik_published_docs(self): def update_publish_mapping(vehicle, variant, language, module, publish_type): # frappe.set_user('Administrator') try: - frappe.log_error("calling module", str(module)) if module == 'Repair service': pub_data = frappe.db.get_list('Module Publish Mapping', filters={ "vehicle": vehicle, "language": language, "publish_type": publish_type }, fields=['name']) - frappe.log_error("pub_data", str(pub_data)) + if len(pub_data) > 0: for d in pub_data: if d['name']: - frappe.log_error("kkkkkkkkkk") frappe.db.sql( f"""UPDATE `tabModule Publish Mapping` set repairservice_check_sheet='1',publish_type='{publish_type}' where name ='{d['name']}'""") From 3d55c6285e3ba2fd79816b4f61a185baeb02bef8 Mon Sep 17 00:00:00 2001 From: venkata akhil Date: Wed, 29 Nov 2023 10:15:05 +0530 Subject: [PATCH 2/7] Publish Changes --- smart_service/apis/v2/master.py | 290 ++++++++++++------ .../kilometer_mapping/kilometer_mapping.py | 115 ++++--- 2 files changed, 258 insertions(+), 147 deletions(-) diff --git a/smart_service/apis/v2/master.py b/smart_service/apis/v2/master.py index 70bbb56..7385f9f 100644 --- a/smart_service/apis/v2/master.py +++ b/smart_service/apis/v2/master.py @@ -18,23 +18,23 @@ date_format = "%Y-%m-%d %H:%M:%S.%f" current_db_name = frappe.conf.get("db_name") # style="""""" @@ -175,7 +175,7 @@ def get_training_information(vehicle, language): @frappe.whitelist(methods=['POST']) -def new_publish(): +def old_publish(): req = json.loads(frappe.request.data) try: module = req['module'] @@ -187,7 +187,7 @@ def new_publish(): data = [] if module == 'Feature Finder': - + flag, data = get_feature_finder(vehicle, variant, language) elif module == 'Repair/Service Check Sheet': @@ -196,98 +196,119 @@ def new_publish(): elif module == 'QWIK Service': flag, data = get_qwik_service(variant, language) CLEANR = re.compile('<.*?>') - CLEANR_back_slash = re.compile("\"") - ul_html='' - 
find_last_ul=''
-
+    CLEANR_back_slash = re.compile("\"")
+    ul_html = ''
+    find_last_ul = ''
+
     for dt in data:
-        if len(dt['consumables'])>0:
-
+        if len(dt['consumables']) > 0:
+
             for con in dt['consumables']:
                 if con['content']:
                     con['content'] = re.sub(CLEANR, '', con['content'])
             # if len(dt['pre_work'])>0:
             for pw in dt['pre_work']:
-                reg_str = "

" + reg_str = "

" reg_str1 = "

" if pw['content']: res = str(pw['content']).find('
') res1 = str(pw['content']).find('

') if res == 1: - pw['content'] = re.sub('

','',pw['content']) - pw['content'] = re.sub('
','',pw['content']) - pw['content'] = re.sub('

','

',pw['content'] ) - pw['content'] = re.sub('

','
',pw['content'] ) - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = re.sub( + '
', '', pw['content']) + pw['content'] = re.sub( + '
', '', pw['content']) + pw['content'] = re.sub( + '

', '

', pw['content']) + pw['content'] = re.sub( + '

', '
', pw['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) pw['content'] = cnverted_data else: - pw['content'] = '
'+pw['content']+'
' - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = '
' + \ + pw['content']+'
' + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) frappe.log_error(str('if')+pw['content']) pw['content'] = cnverted_data if res1 == 1: - pw['content'] = re.sub('
','',pw['content']) - pw['content'] = re.sub('
','',pw['content']) - pw['content'] = re.sub('

','

',pw['content'] ) - pw['content'] = re.sub('

','
',pw['content'] ) - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = re.sub( + '
', '', pw['content']) + pw['content'] = re.sub( + '
', '', pw['content']) + pw['content'] = re.sub( + '

', '

', pw['content']) + pw['content'] = re.sub( + '

', '
', pw['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) pw['content'] = cnverted_data else: frappe.log_error(str('else')+pw['content']) - pw['content'] = '
'+pw['content']+'
' - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = '
' + \ + pw['content']+'
' + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) frappe.log_error(str('if')+pw['content']) pw['content'] = cnverted_data - if dt['qwik_procedure']: for qp in dt['qwik_procedure']['LHS']: if qp['content']: - qp['content'] = re.sub('
    ', ul_html,qp['content'] ) - qp['content'] = re.sub('
  • ',li_html,qp['content']) - clsing_li_tag = re.sub('
  • ',cling_li,qp['content']) + qp['content'] = re.sub( + '
      ', ul_html, qp['content']) + qp['content'] = re.sub( + '
    • ', li_html, qp['content']) + clsing_li_tag = re.sub( + '
    • ', cling_li, qp['content']) qp['content'] = clsing_li_tag - res = re.sub(find_last_ul,cling_last,qp['content']) + res = re.sub( + find_last_ul, cling_last, qp['content']) qp['content'] = res - - cnverted_data = re.sub(CLEANR_back_slash,"'",qp['content']) + + cnverted_data = re.sub( + CLEANR_back_slash, "'", qp['content']) qp['content'] = cnverted_data - for qp_rhs in dt['qwik_procedure']['RHS']: if qp_rhs: - qp_rhs['content'] = re.sub('
        ', ul_html,qp_rhs['content'] ) - qp_rhs['content'] = re.sub('
      • ',li_html,qp_rhs['content']) - qp_rhs['content'] = re.sub('
      • ',cling_li,qp_rhs['content']) - qp_rhs['content'] = re.sub(find_last_ul,cling_last,qp_rhs['content']) - cnverted_data = re.sub(CLEANR_back_slash,"'",qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
          ', ul_html, qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
        • ', li_html, qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
        • ', cling_li, qp_rhs['content']) + qp_rhs['content'] = re.sub( + find_last_ul, cling_last, qp_rhs['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", qp_rhs['content']) qp_rhs['content'] = cnverted_data - - elif module == 'Training Information': # flag, data = get_training_information(vehicle, language) pass elif module == 'Mahindra Special Tool Information': - CLEANR_back_slash = re.compile("\"") + CLEANR_back_slash = re.compile("\"") html_code_ul = '
            ' - html_code_li = '
          • ' + html_code_li = '
• '
        flag, data = get_special_tool_information(vehicle)
        for dt in data:
-            if len(dt['instructions'])>0:
+            if len(dt['instructions']) > 0:
                for c_t in dt['instructions']:
                    if c_t['content_type'] == 'Description':
                        if c_t['content']:
-                            c_t['content'] = html_code_ul+html_code_li+c_t['content']+'
          • '+'
          ' - cnverted_data = re.sub(CLEANR_back_slash,"'",c_t['content']) + c_t['content'] = html_code_ul + \ + html_code_li+c_t['content']+''+'
        ' + cnverted_data = re.sub( + CLEANR_back_slash, "'", c_t['content']) c_t['content'] = cnverted_data - elif module == 'PDI Inspection': pass @@ -311,8 +332,8 @@ def new_publish(): return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None} -@frappe.whitelist(methods=['POST']) -def new_publish1(): +@frappe.whitelist(methods=['POST'], allow_guest=1) +def new_publish(): req = json.loads(frappe.request.data) try: module = req['module'] @@ -328,30 +349,109 @@ def new_publish1(): if len(publish_type) > 0: publish_type = publish_type[0]['publish_type'] + else: + return {"status": 0, "error": "Publish Type Not Set For: " + iid} BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \ str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/' + FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + '/' + str(publish_type) + \ + '/'+"{}".format(publish_type)+" Full Update/" if module == 'Feature Finder': - with open(BASE_PATH + str(vehicle) + '-feature_finder.json') as outfile: - data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + if publish_type == 'Internal': + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-feature_finder_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + + else: + LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + \ + '/' + str(publish_type) + '/' + with open(LATEST_GLOBAL_PATH + str(vehicle) + '-feature_finder.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + else: + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-feature_finder_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) elif module == 'Repair/Service Check Sheet': - with open(BASE_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile: - data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + if publish_type == 'Internal': + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-repair_check_sheet_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + else: + LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + \ + '/' + str(publish_type) + '/' + with open(LATEST_GLOBAL_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + else: + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-repair_check_sheet_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + 
data = data.get('data') + data = data.get(variant) elif module == 'QWIK Service': - with open(BASE_PATH + str(vehicle) + '-qwik_service.json') as outfile: - data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + if publish_type == 'Internal': + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-qwik_service_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + + data = data.get('data') + data = data.get('variant') + else: + LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + \ + '/' + str(publish_type) + '/' + with open(LATEST_GLOBAL_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get('variant') + else: + if os.path.isfile(FULL_UPDATE_PATH): + with open(FULL_UPDATE_PATH + str(vehicle) + '-qwik_service_full_update.json') as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get('variant') elif module == 'Training Information': pass @@ -359,9 +459,11 @@ def new_publish1(): elif module == 'Mahindra Special Tool Information': with open(BASE_PATH + str(vehicle) + '-special_tool_information.json') as outfile: data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) elif module == 'PDI Inspection': pass @@ -454,10 +556,22 @@ def update_publish_mapping(vehicle, variant, language, module): return 1 -def filter_publish_json(src_json, filter_cond): +# def filter_publish_json(src_json, filter_cond): +# try: +# # filtered_json = [src_json.get(filter_cond, None) for d in src_json] +# # if not filtered_json: +# # return None +# # return filtered_json[0] +# frappe.log_error(str(filter_cond)) +# return src_json[filter_cond] +# except Exception as e: +# frappe.throw(str(e)) + + +def valid_input(input): try: - filtered_json = [src_json.get(filter_cond, None) for d in src_json] + for key, val in input.items(): + pass - return filtered_json[0] except Exception as e: - frappe.throw(str(e)) + return str(e) diff --git a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py index 26ffeef..09b7d60 100644 --- a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py +++ b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py @@ -5,75 +5,72 @@ import frappe from frappe.model.document import Document import sys from frappe import _ -sys.tracebacklimit=0 +sys.tracebacklimit = 0 + class KilometerMapping(Document): - def onload(self): - ###Kilometer mapping config table idx allignment - if(self.config_kilometer): - idx = 0 - for x in self.get("config_kilometer"): - idx += 1 - x.idx = idx + def onload(self): + # Kilometer mapping config table idx allignment + if (self.config_kilometer): + idx = 0 + for x in self.get("config_kilometer"): + idx += 1 + x.idx = idx + @frappe.whitelist() def get_system(doc): - current_db_name = 
frappe.conf.get("db_name") - system_map = frappe.db.sql("""select name from {0}.`tabSystem Mapping` where variant_mapping='{1}'""".format(current_db_name,doc),as_dict=True) - if system_map: - for s in system_map: - doc = frappe.get_doc("System Mapping",s.name) - system = frappe.db.sql("""select sub_systems,mat from {0}.`tabSystem Mapping_Sub System` where parent = '{1}' and procedure_is_published = '0'""".format(current_db_name,s.name),as_dict=True) - return system - + current_db_name = frappe.conf.get("db_name") + system_map = frappe.db.sql("""select name from {0}.`tabSystem Mapping` where variant_mapping='{1}'""".format( + current_db_name, doc), as_dict=True) + if system_map: + for s in system_map: + doc = frappe.get_doc("System Mapping", s.name) + system = frappe.db.sql("""select sub_systems,mat from {0}.`tabSystem Mapping_Sub System` where parent = '{1}' and procedure_is_published = '0'""".format( + current_db_name, s.name), as_dict=True) + return system + + @frappe.whitelist() def get_subsystem(doc): - current_db_name = frappe.conf.get("db_name") - system_map = frappe.db.sql("""select name from {0}.`tabSystem Mapping` where variant_mapping='{1}'""".format(current_db_name,doc),as_dict=True) - if system_map: - for s in system_map: - doc = frappe.get_doc("System Mapping",s.name) - sub_system = frappe.db.sql("""select sub_systems,mat,idx from {0}.`tabSystem Mapping_Sub System` where parent = '{1}' and procedure_is_published = '0'""".format(current_db_name,s.name),as_dict=True) - return sub_system + current_db_name = frappe.conf.get("db_name") + system_map = frappe.db.sql("""select name from {0}.`tabSystem Mapping` where variant_mapping='{1}'""".format( + current_db_name, doc), as_dict=True) + if system_map: + for s in system_map: + doc = frappe.get_doc("System Mapping", s.name) + sub_system = frappe.db.sql("""select sub_systems,mat,idx from {0}.`tabSystem Mapping_Sub System` where parent = '{1}' and procedure_is_published = '0'""".format( + current_db_name, s.name), as_dict=True) + return sub_system @frappe.whitelist() -def update_duplicate_km(doc,kilometer): - - km = frappe.get_doc("Kilometer Mapping",doc) - output = frappe.db.sql("""select kilometer,mat,sub_systems,row_name from `tabKilometer Mapping_Items` where parent = '{0}' and kilometer = '{1}' order by kilometer""".format(doc,kilometer),as_dict=True) - # frappe.msgprint(str(output)) - for data in output: - # frappe.msgprint(str(data['mat'])) - km.append('duplicate_kilometer_mapping_items', { - 'kilometer': data['kilometer'], - 'mat': data['mat'], - 'sub_systems': data['sub_systems'], - 'row_name': data['row_name'] - }) - km.save(ignore_permissions=True) - - - -@frappe.whitelist() -def get_config_kilometer(doc,docname): - res = frappe.db.sql(f"""select kilometer,mat,sub_systems,applicable,row_name from `tabKilometer Mapping_Items` where parent='{doc}'""",as_dict=1) - ress = frappe.get_doc("Kilometer Mapping",docname) - ress.config_kilometer = [] - for d in res: - ress.append("config_kilometer",d) - ress.add_comment(text=_("data added"+str(res))) - ress.save() - frappe.db.commit() - return {"status":"success"} - +def update_duplicate_km(doc, kilometer): -# @frappe.whitelist() -# def km_filter(parent = None , record = None, km = None): -# if record == "All": -# km_filters = frappe.db.sql(f"""select * from `tabKilometer Mapping_Items` where parent ="{parent}" order by length(kilometer),kilometer;""",as_dict = 1) + km = frappe.get_doc("Kilometer Mapping", doc) + output = frappe.db.sql("""select kilometer,mat,sub_systems,row_name from 
`tabKilometer Mapping_Items` where parent = '{0}' and kilometer = '{1}' order by kilometer""".format( + doc, kilometer), as_dict=True) + # frappe.msgprint(str(output)) + for data in output: + # frappe.msgprint(str(data['mat'])) + km.append('duplicate_kilometer_mapping_items', { + 'kilometer': data['kilometer'], + 'mat': data['mat'], + 'sub_systems': data['sub_systems'], + 'row_name': data['row_name'] + }) + km.save(ignore_permissions=True) -# else: -# km_filters =frappe.db.sql(f"""select * from `tabKilometer Mapping_Items` where parent ="{parent}" and kilometer ="{km}" order by length(kilometer),kilometer """,as_dict = 1) -# return km_filters \ No newline at end of file +@frappe.whitelist() +def get_config_kilometer(doc, docname): + res = frappe.db.sql( + f"""select kilometer,mat,sub_systems,applicable,row_name from `tabKilometer Mapping_Items` where parent='{doc}'""", as_dict=1) + ress = frappe.get_doc("Kilometer Mapping", docname) + ress.config_kilometer = [] + for d in res: + ress.append("config_kilometer", d) + ress.add_comment(text=_("data added"+str(res))) + ress.save() + frappe.db.commit() + return {"status": "success"} From b64c2b1dd3a232ab1d5ae99c9018e9ded6826b6a Mon Sep 17 00:00:00 2001 From: venkataakhil Date: Wed, 29 Nov 2023 10:31:37 +0530 Subject: [PATCH 3/7] publish changes --- smart_service/transactions/doctype/publish/publish.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/smart_service/transactions/doctype/publish/publish.py b/smart_service/transactions/doctype/publish/publish.py index 7461f67..3079ce3 100644 --- a/smart_service/transactions/doctype/publish/publish.py +++ b/smart_service/transactions/doctype/publish/publish.py @@ -1235,7 +1235,7 @@ def feature_finder_data(vehicle=None, language=None, ff.features_names as feature_name,ff.feature_category,ff.display_order,ff.active_status, ffm.feature_pdf as pdf from `tabFeature Finder` ff left join `tabFeature Finder Master` ffm on ff.features_names = ffm.feature_name - where ff.vehicle = "THAR" and ffm.vehicle = "{vehicle}" + where ff.vehicle = "{vehicle}" and ffm.vehicle = "{vehicle}" and ff.variant = "{variant}" and ff.language = "{language}" and ffm.language = "{language}" and ff.name="{parent}"; """, as_dict=1) From ab2fb4aa3e1222b09c3d0ad44544d0c1859b962d Mon Sep 17 00:00:00 2001 From: venkataakhil Date: Wed, 29 Nov 2023 10:32:14 +0530 Subject: [PATCH 4/7] publish changes --- smart_service/apis/app_user_login.py | 5 +- smart_service/apis/v2/master.py | 98 ++++- smart_service/apis/v2/phase2_test_publish.py | 137 +++++++ .../doctype/feature_finder/feature_finder.js | 13 +- .../feature_finder/feature_finder.json | 2 +- .../doctype/feature_finder/feature_finder.py | 17 +- .../feature_finder_master.py | 2 +- .../special_tool_information.py | 6 +- .../doctype/torque_category/__init__.py | 0 .../torque_category/test_torque_category.py | 8 + .../torque_category/torque_category.js | 8 + .../torque_category/torque_category.json | 41 ++ .../torque_category/torque_category.py | 8 + .../doctype/torque_details/__init__.py | 0 .../torque_details/torque_details.json | 58 +++ .../doctype/torque_details/torque_details.py | 8 + .../phase_2/doctype/torque_master/__init__.py | 0 .../torque_master/test_torque_master.py | 8 + .../doctype/torque_master/torque_master.js | 8 + .../doctype/torque_master/torque_master.json | 68 ++++ .../doctype/torque_master/torque_master.py | 8 + .../doctype/torque_subcategory/__init__.py | 0 .../test_torque_subcategory.py | 8 + .../torque_subcategory/torque_subcategory.js | 8 + 
.../torque_subcategory.json | 41 ++ .../torque_subcategory/torque_subcategory.py | 8 + .../training_information.py | 2 +- .../kilometer_mapping/kilometer_mapping.js | 2 - .../transactions/doctype/publish/publish.js | 187 +++++---- .../transactions/doctype/publish/publish.json | 14 +- .../transactions/doctype/publish/publish.py | 366 +++++++++++++----- .../variant_mapping/variant_mapping.py | 123 ++++-- 32 files changed, 1027 insertions(+), 235 deletions(-) create mode 100644 smart_service/apis/v2/phase2_test_publish.py create mode 100644 smart_service/phase_2/doctype/torque_category/__init__.py create mode 100644 smart_service/phase_2/doctype/torque_category/test_torque_category.py create mode 100644 smart_service/phase_2/doctype/torque_category/torque_category.js create mode 100644 smart_service/phase_2/doctype/torque_category/torque_category.json create mode 100644 smart_service/phase_2/doctype/torque_category/torque_category.py create mode 100644 smart_service/phase_2/doctype/torque_details/__init__.py create mode 100644 smart_service/phase_2/doctype/torque_details/torque_details.json create mode 100644 smart_service/phase_2/doctype/torque_details/torque_details.py create mode 100644 smart_service/phase_2/doctype/torque_master/__init__.py create mode 100644 smart_service/phase_2/doctype/torque_master/test_torque_master.py create mode 100644 smart_service/phase_2/doctype/torque_master/torque_master.js create mode 100644 smart_service/phase_2/doctype/torque_master/torque_master.json create mode 100644 smart_service/phase_2/doctype/torque_master/torque_master.py create mode 100644 smart_service/phase_2/doctype/torque_subcategory/__init__.py create mode 100644 smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py create mode 100644 smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js create mode 100644 smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json create mode 100644 smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py diff --git a/smart_service/apis/app_user_login.py b/smart_service/apis/app_user_login.py index b2464ea..5fcbec1 100644 --- a/smart_service/apis/app_user_login.py +++ b/smart_service/apis/app_user_login.py @@ -812,11 +812,14 @@ def input_validation(**kwargs): sts = "" i = 0 for key, value in kwargs.items(): - if value is None or value == "": + # frappe.log_error("key"+str(key)) + # frappe.log_error("value"+str(value)) + if value is None or value == "" or value == '': if i != 0: sts += ',' sts += str(key) + "" i += 1 + # frappe.log_error(sts) return sts diff --git a/smart_service/apis/v2/master.py b/smart_service/apis/v2/master.py index 70bbb56..0c7942e 100644 --- a/smart_service/apis/v2/master.py +++ b/smart_service/apis/v2/master.py @@ -6,7 +6,8 @@ from frappe import utils import json import html import os -from smart_service.apis.app_user_login import input_validation + + from frappe.utils import cstr site_name = cstr(frappe.local.site) @@ -311,7 +312,7 @@ def new_publish(): return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None} -@frappe.whitelist(methods=['POST']) +@frappe.whitelist(methods=['POST'],allow_guest = 1) def new_publish1(): req = json.loads(frappe.request.data) try: @@ -321,6 +322,10 @@ def new_publish1(): variant = req['variant'] language = req['language'] + val = input_validation(req) + + if val != '' or val != "": + return {"status": 0, "error": "Input parameter Missing: " + val} data = [] # Get Publish Type publish_type = frappe.db.get_list( @@ -328,40 
+333,69 @@ def new_publish1(): if len(publish_type) > 0: publish_type = publish_type[0]['publish_type'] + else: + return {"status": 0, "error": "Publish Type Not Set For: " + iid} - BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \ - str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/' + # BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \ + # str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/' + FULL_UPDATE_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"{}".format(publish_type)+"_Full_Update/" + FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + LATEST_GLOBAL_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"Global_Full_Update/" + LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + if module == 'Feature Finder': - with open(BASE_PATH + str(vehicle) + '-feature_finder.json') as outfile: - data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + # if publish_type == 'Internal': + if os.path.isfile(FULL_UPDATE_PATH_FILE): + with open(FULL_UPDATE_PATH_FILE) as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + else: + + with open(LATEST_GLOBAL_FILE) as outfile: + data = json.load(outfile) + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) + + elif module == 'Repair/Service Check Sheet': with open(BASE_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile: data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) elif module == 'QWIK Service': with open(BASE_PATH + str(vehicle) + '-qwik_service.json') as outfile: data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) elif module == 'Training Information': pass elif module == 'Mahindra Special Tool Information': + with open(BASE_PATH + str(vehicle) + '-special_tool_information.json') as outfile: data = json.load(outfile) - data = data['data'] - filter_json = filter_publish_json(data, variant) - data = filter_json + # data = data['data'] + # filter_json = filter_publish_json(data, variant) + # data = filter_json + data = data.get('data') + data = data.get(variant) elif module == 'PDI Inspection': pass @@ -376,7 +410,7 @@ def new_publish1(): except Exception as e: frappe.local.response['http_status_code'] = 400 - return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None} + return {'status': 0, 'error': str(e), 'data': None} @frappe.whitelist(methods=['POST'], allow_guest=1) @@ -460,4 +494,28 @@ def filter_publish_json(src_json, filter_cond): return filtered_json[0] except Exception as e: - frappe.throw(str(e)) + frappe.throw("fil",str(e)) + + +def input_validation(req): + sts = "" + i = 0 + for key, value in req.items(): + frappe.log_error("key"+str(key)) 
+ frappe.log_error("value"+str(value)) + if value is None or value == "" or value == '': + if i != 0: + sts += ',' + sts += str(key) + "" + i += 1 + frappe.log_error(sts) + return sts + + + + +@frappe.whitelist() +def get_published_data(): + data = frappe.db.sql("""select vehicle,variant,language,publish_type from `tabPublish` where not in (select vehicle,variant,language,publish_type from + `tabModule Publish Mapping`)""",as_dict=1) + return data \ No newline at end of file diff --git a/smart_service/apis/v2/phase2_test_publish.py b/smart_service/apis/v2/phase2_test_publish.py new file mode 100644 index 0000000..dcbfdb6 --- /dev/null +++ b/smart_service/apis/v2/phase2_test_publish.py @@ -0,0 +1,137 @@ +import frappe +import re +from urllib import request +import datetime +from frappe import utils +import json +import html +import os +from frappe.utils import logger +from frappe.utils import cstr +frappe.utils.logger.set_log_level("DEBUG") + +site_name = cstr(frappe.local.site) +BASE_URL = os.path.expanduser( + "~") + "/frappe-bench/sites/" + site_name + "/public" + + +date_format = "%Y-%m-%d %H:%M:%S.%f" +current_db_name = frappe.conf.get("db_name") +@frappe.whitelist(allow_guest = 1,methods = ['POST']) +def repair_checksheet_publish(vehicle = None, language = None + , publish_type = None , parent = None): + try: + req = json.loads(frappe.request.data) + logger_file = f'{vehicle} - {language} - {publish_type} - repair checksheet' + logger = frappe.logger(logger_file, allow_site=True, file_count=100) + logger.info( + f'Repair Checksheet Started-{vehicle}-{language}-{publish_type}-{parent}') + + folder_url = base_url+"/"+vehicle.replace(' ', '-')+"-TEST" + "/"+language + logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}') + + '''Publish Ready Flags''' + publish_repair_checksheet = 0 + repair_checksheet_tmp = [] + + '''Create Folder For Publish''' + create_publish_folders(folder_url) + file_path = folder_url + "/" + publish_type + "/" + \ + vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json' + + full_update_path = folder_url + "/" + publish_type + "/" + \ + vehicle.replace(' ', '-') + '-repair_check_sheet_full_update' + '.json' + + '''Append Published Data to Json''' + logger.info( + f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}') + vehicle_data = { + 'vehicle': vehicle, + 'vehicle_myid': vehicle_id, + 'publish_type': publish_type, + 'publish_description': release_description, + 'publish_language': language, + 'data': '' + } + + '''update existing global json file''' + if os.path.isfile(file_path): + with open(file_path) as f: + published_data = json.load(f) + for i in parent: + repair_checksheet = repair_checksheet_data(vehicle, language, + publish_type, i.parent_name) + if repair_checksheet['status'] == 1: + publish_repair_checksheet = 1 + repair_checksheet_tmp.append( + repair_checksheet['data'][0]) + + repair_checksheet_tmp = get_latest_data({'data': published_data['data']}, { + 'data': repair_checksheet_tmp}) + + else: + for i in parent.repiar_checksheet_publish_docs: + repair_checksheet = repair_checksheet_data( + vehicle, language, publish_type, i.parent_name) + + if repair_checksheet['status'] == 1: + publish_repair_checksheet = 1 + if len(repair_checksheet['data']): + repair_checksheet_tmp.append( + repair_checksheet['data'][0]) + + repair_checksheet_tmp = get_latest_data( + {'data': repair_checksheet_tmp}, {'data': []}) + + if publish_repair_checksheet == 1: + """ Save publish file """ + vehicle_data['data'] = 
repair_checksheet_tmp + + with open(file_path, 'w') as outfile: + outfile.write(json.dumps(vehicle_data, indent=4, default=str)) + + return 1, file_path + + except Exception as e: + logger.info( + f'{vehicle} - {language} - {publish_type} error in json creation' + str(e)) + frappe.throw('Failed To Publish') + + + +def repair_checksheet_data(vehicle=None, language=None, + publish_type=None, parent=None): + try: + logger_file = f'{vehicle} - {language} - {publish_type} - repair_checksheet_data' + logger = frappe.logger(logger_file, + allow_site=True, file_count=100) + logger.info( + f"start of fetching repair checksheet data - {vehicle} - {language}") + + repair_service_details = frappe.db.sql(f"""select vehicle,vehicle_id, + name,language,check_list_name, + keywords, + active_status,display_order,my_id + from `tabRepair Service Mapping` + where vehicle = '{vehicle}' + and language = '{language}' + and name = '{parent}'""", as_dict=1) + + for d in repair_service_details: + d['complaint'] = frappe.db.sql(f"""select complaint,remedial_action_ok, + remedial_action_not_ok,idx as display_order from `tabRepair Service Child` + where parent = '{d['name']}' + order by display_order ;""", as_dict=1) + + logger.info( + f"end of fetching repair checksheet data {vehicle} - {language}") + success_reponse['data'] = repair_service_details + success_reponse[ + 'message'] = f'Repair Checksheet Fecthed Succesfully for {vehicle} - {language} - repair_checksheet_data ' + return success_reponse + + except Exception as e: + failure_reponse['error'] = f"{vehicle} - {language} has following error - " + str( + e) + logger.error('error in repair checksheet' + str(e)) + return failure_reponse \ No newline at end of file diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.js b/smart_service/phase_2/doctype/feature_finder/feature_finder.js index f02455c..8cc7c72 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.js +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.js @@ -798,10 +798,12 @@ function custom_tab_html(frm) { frappe.call({ method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_spec_data", args: { - values: arr + values: arr, + doc_name:frm.doc.name }, callback: function (r) { if (r.message.status == "success") { + frm.set_value('is_published', '0') cur_frm.reload_doc() } } @@ -823,10 +825,12 @@ function custom_tab_html(frm) { frappe.call({ method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_image_data", args: { - values: arr + values: arr, + doc_name:frm.doc.name }, callback: function (r) { if (r.message.status == "success") { + frm.set_value('is_published', '0') cur_frm.reload_doc() } } @@ -848,7 +852,8 @@ function custom_tab_html(frm) { frappe.call({ method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_content_data", args: { - values: arr + values: arr, + doc_name:frm.doc.name }, callback: function (r) { if (r.message.status == "success") { @@ -1106,7 +1111,7 @@ function custom_tab_html(frm) { { label: 'Content', fieldname: 'content', - fieldtype: 'Data', + fieldtype: 'Small Text', default: content_dialog_value.content, }, diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.json b/smart_service/phase_2/doctype/feature_finder/feature_finder.json index 2ff5bcb..bcd9b0e 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.json +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.json @@ -209,7 +209,7 @@ ], 
"index_web_pages_for_search": 1, "links": [], - "modified": "2023-10-06 11:11:09.968138", + "modified": "2023-11-24 16:28:33.427222", "modified_by": "Administrator", "module": "Phase-2", "name": "Feature Finder", diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.py b/smart_service/phase_2/doctype/feature_finder/feature_finder.py index db85913..94adc86 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.py +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.py @@ -138,13 +138,16 @@ def insert_spec_data(doc_name, values, tab_ref): @frappe.whitelist() -def delete_spec_data(values): +def delete_spec_data(values,doc_name): try: val = json.loads(values) if len(val) > 0: for d in val: frappe.delete_doc("Feature Finder Specification", d) frappe.db.commit() + if doc_name: + frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name)) + frappe.db.commit() return {"status": "success"} except Exception as e: frappe.log_error("delete_spec_data", str(e)) @@ -184,13 +187,16 @@ def insert_image_data(doc_name, values, tab_ref): @frappe.whitelist() -def delete_image_data(values): +def delete_image_data(values,doc_name): try: val = json.loads(values) if len(val) > 0: for d in val: frappe.delete_doc("Feature Finder Images", d) frappe.db.commit() + if doc_name: + frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name)) + frappe.db.commit() return {"status": "success"} except Exception as e: frappe.log_error("delete_image_data", str(e)) @@ -230,13 +236,16 @@ def insert_content_data(doc_name, values, tab_ref): @frappe.whitelist() -def delete_content_data(values): +def delete_content_data(values,doc_name): try: val = json.loads(values) if len(val) > 0: for d in val: frappe.delete_doc("Feature Finder Content", d) frappe.db.commit() + if doc_name: + frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name)) + frappe.db.commit() return {"status": "success"} except Exception as e: frappe.log_error("delete_image_data", str(e)) @@ -294,7 +303,7 @@ def insert_tab_ref(docname, tab_ref): @frappe.whitelist() -def file_validation(image): +def file_validation(image=None): from smart_service.apis.utils import check_png_ext, check_img_ext,details_of_image if image: res = check_img_ext(image) diff --git a/smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py b/smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py index 7191570..b488d9c 100644 --- a/smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py +++ b/smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py @@ -9,7 +9,7 @@ class FeatureFinderMaster(Document): @frappe.whitelist() -def file_validation(image, name, value=None): +def file_validation(image =None, name=None, value=None): from smart_service.apis.utils import check_png_ext, check_img_ext,check_pdf_ext,details_of_image if image: res = check_pdf_ext(image) diff --git a/smart_service/phase_2/doctype/special_tool_information/special_tool_information.py b/smart_service/phase_2/doctype/special_tool_information/special_tool_information.py index 36935f0..344da8d 100644 --- a/smart_service/phase_2/doctype/special_tool_information/special_tool_information.py +++ b/smart_service/phase_2/doctype/special_tool_information/special_tool_information.py @@ -39,7 +39,7 @@ class SpecialToolInformation(Document): @frappe.whitelist() -def file_validation(image, name): +def 
file_validation(image=None, name=None): from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image if image: res = check_img_ext(image) @@ -61,7 +61,7 @@ def file_validation(image, name): return ret, 1 @frappe.whitelist() -def file_validation1(image, name): +def file_validation1(image=None, name=None): from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image if image: res = check_img_ext(image) @@ -108,7 +108,7 @@ def file_validation3(video=None): @frappe.whitelist() -def file_validation_child(name, value=None): +def file_validation_child(name=None, value=None): from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image, check_mp4_ext, get_file_size if name: child_data = frappe.db.sql( diff --git a/smart_service/phase_2/doctype/torque_category/__init__.py b/smart_service/phase_2/doctype/torque_category/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/smart_service/phase_2/doctype/torque_category/test_torque_category.py b/smart_service/phase_2/doctype/torque_category/test_torque_category.py new file mode 100644 index 0000000..0a9f0f4 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_category/test_torque_category.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors +# See license.txt + +# import frappe +import unittest + +class TestTorqueCategory(unittest.TestCase): + pass diff --git a/smart_service/phase_2/doctype/torque_category/torque_category.js b/smart_service/phase_2/doctype/torque_category/torque_category.js new file mode 100644 index 0000000..39566aa --- /dev/null +++ b/smart_service/phase_2/doctype/torque_category/torque_category.js @@ -0,0 +1,8 @@ +// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +// For license information, please see license.txt + +frappe.ui.form.on('Torque Category', { + // refresh: function(frm) { + + // } +}); diff --git a/smart_service/phase_2/doctype/torque_category/torque_category.json b/smart_service/phase_2/doctype/torque_category/torque_category.json new file mode 100644 index 0000000..c7cb692 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_category/torque_category.json @@ -0,0 +1,41 @@ +{ + "actions": [], + "allow_rename": 1, + "creation": "2023-11-24 17:09:08.707664", + "doctype": "DocType", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "category" + ], + "fields": [ + { + "fieldname": "category", + "fieldtype": "Data", + "label": "Category" + } + ], + "index_web_pages_for_search": 1, + "links": [], + "modified": "2023-11-24 17:09:08.707664", + "modified_by": "Administrator", + "module": "Phase-2", + "name": "Torque Category", + "owner": "Administrator", + "permissions": [ + { + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "System Manager", + "share": 1, + "write": 1 + } + ], + "sort_field": "modified", + "sort_order": "DESC" +} \ No newline at end of file diff --git a/smart_service/phase_2/doctype/torque_category/torque_category.py b/smart_service/phase_2/doctype/torque_category/torque_category.py new file mode 100644 index 0000000..0ff2363 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_category/torque_category.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +# For license information, please see license.txt + 
+# import frappe +from frappe.model.document import Document + +class TorqueCategory(Document): + pass diff --git a/smart_service/phase_2/doctype/torque_details/__init__.py b/smart_service/phase_2/doctype/torque_details/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/smart_service/phase_2/doctype/torque_details/torque_details.json b/smart_service/phase_2/doctype/torque_details/torque_details.json new file mode 100644 index 0000000..8869475 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_details/torque_details.json @@ -0,0 +1,58 @@ +{ + "actions": [], + "allow_rename": 1, + "creation": "2023-11-24 17:09:41.181853", + "doctype": "DocType", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "fastener_description", + "specification", + "qty", + "torque_nm", + "remark" + ], + "fields": [ + { + "fieldname": "fastener_description", + "fieldtype": "Data", + "in_list_view": 1, + "label": "Fastener Description" + }, + { + "fieldname": "specification", + "fieldtype": "Data", + "in_list_view": 1, + "label": "Specification" + }, + { + "fieldname": "qty", + "fieldtype": "Data", + "in_list_view": 1, + "label": "Qty" + }, + { + "fieldname": "torque_nm", + "fieldtype": "Data", + "in_list_view": 1, + "label": "Torque NM" + }, + { + "fieldname": "remark", + "fieldtype": "Data", + "in_list_view": 1, + "label": "Remark" + } + ], + "index_web_pages_for_search": 1, + "istable": 1, + "links": [], + "modified": "2023-11-24 17:11:33.158796", + "modified_by": "Administrator", + "module": "Phase-2", + "name": "Torque Details", + "owner": "Administrator", + "permissions": [], + "sort_field": "modified", + "sort_order": "DESC" +} \ No newline at end of file diff --git a/smart_service/phase_2/doctype/torque_details/torque_details.py b/smart_service/phase_2/doctype/torque_details/torque_details.py new file mode 100644 index 0000000..7e01309 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_details/torque_details.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +# For license information, please see license.txt + +# import frappe +from frappe.model.document import Document + +class TorqueDetails(Document): + pass diff --git a/smart_service/phase_2/doctype/torque_master/__init__.py b/smart_service/phase_2/doctype/torque_master/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/smart_service/phase_2/doctype/torque_master/test_torque_master.py b/smart_service/phase_2/doctype/torque_master/test_torque_master.py new file mode 100644 index 0000000..cd2d49a --- /dev/null +++ b/smart_service/phase_2/doctype/torque_master/test_torque_master.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors +# See license.txt + +# import frappe +import unittest + +class TestTorqueMaster(unittest.TestCase): + pass diff --git a/smart_service/phase_2/doctype/torque_master/torque_master.js b/smart_service/phase_2/doctype/torque_master/torque_master.js new file mode 100644 index 0000000..fae5feb --- /dev/null +++ b/smart_service/phase_2/doctype/torque_master/torque_master.js @@ -0,0 +1,8 @@ +// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +// For license information, please see license.txt + +frappe.ui.form.on('Torque Master', { + // refresh: function(frm) { + + // } +}); diff --git a/smart_service/phase_2/doctype/torque_master/torque_master.json b/smart_service/phase_2/doctype/torque_master/torque_master.json new file mode 100644 index 0000000..e8e7b19 --- /dev/null +++ 
b/smart_service/phase_2/doctype/torque_master/torque_master.json @@ -0,0 +1,68 @@ +{ + "actions": [], + "allow_rename": 1, + "creation": "2023-11-24 17:10:52.598215", + "doctype": "DocType", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "category", + "column_break_gkwxt", + "sub_category", + "section_break_cmyhs", + "torque_table" + ], + "fields": [ + { + "fieldname": "category", + "fieldtype": "Link", + "in_list_view": 1, + "label": "Category", + "options": "Torque Category" + }, + { + "fieldname": "sub_category", + "fieldtype": "Link", + "in_list_view": 1, + "label": "Sub Category", + "options": "Torque Subcategory" + }, + { + "fieldname": "torque_table", + "fieldtype": "Table", + "label": "Torque Table", + "options": "Torque Details" + }, + { + "fieldname": "column_break_gkwxt", + "fieldtype": "Column Break" + }, + { + "fieldname": "section_break_cmyhs", + "fieldtype": "Section Break" + } + ], + "index_web_pages_for_search": 1, + "links": [], + "modified": "2023-11-24 17:12:07.675053", + "modified_by": "Administrator", + "module": "Phase-2", + "name": "Torque Master", + "owner": "Administrator", + "permissions": [ + { + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "System Manager", + "share": 1, + "write": 1 + } + ], + "sort_field": "modified", + "sort_order": "DESC" +} \ No newline at end of file diff --git a/smart_service/phase_2/doctype/torque_master/torque_master.py b/smart_service/phase_2/doctype/torque_master/torque_master.py new file mode 100644 index 0000000..d9a871c --- /dev/null +++ b/smart_service/phase_2/doctype/torque_master/torque_master.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +# For license information, please see license.txt + +# import frappe +from frappe.model.document import Document + +class TorqueMaster(Document): + pass diff --git a/smart_service/phase_2/doctype/torque_subcategory/__init__.py b/smart_service/phase_2/doctype/torque_subcategory/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py b/smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py new file mode 100644 index 0000000..c095c03 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors +# See license.txt + +# import frappe +import unittest + +class TestTorqueSubcategory(unittest.TestCase): + pass diff --git a/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js new file mode 100644 index 0000000..754ac2f --- /dev/null +++ b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js @@ -0,0 +1,8 @@ +// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +// For license information, please see license.txt + +frappe.ui.form.on('Torque Subcategory', { + // refresh: function(frm) { + + // } +}); diff --git a/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json new file mode 100644 index 0000000..957a762 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json @@ -0,0 +1,41 @@ +{ + "actions": [], + "allow_rename": 1, + "creation": "2023-11-24 17:09:21.527639", + "doctype": "DocType", + 
"editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "sub_category" + ], + "fields": [ + { + "fieldname": "sub_category", + "fieldtype": "Data", + "label": "Sub Category" + } + ], + "index_web_pages_for_search": 1, + "links": [], + "modified": "2023-11-24 17:09:21.527639", + "modified_by": "Administrator", + "module": "Phase-2", + "name": "Torque Subcategory", + "owner": "Administrator", + "permissions": [ + { + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "System Manager", + "share": 1, + "write": 1 + } + ], + "sort_field": "modified", + "sort_order": "DESC" +} \ No newline at end of file diff --git a/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py new file mode 100644 index 0000000..cb848b8 --- /dev/null +++ b/smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors +# For license information, please see license.txt + +# import frappe +from frappe.model.document import Document + +class TorqueSubcategory(Document): + pass diff --git a/smart_service/phase_2/doctype/training_information/training_information.py b/smart_service/phase_2/doctype/training_information/training_information.py index e5be4b8..e7d8fd9 100644 --- a/smart_service/phase_2/doctype/training_information/training_information.py +++ b/smart_service/phase_2/doctype/training_information/training_information.py @@ -21,7 +21,7 @@ class TrainingInformation(Document): @frappe.whitelist() -def file_validation_child(name, value=None): +def file_validation_child(name=None, value=None): from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image, check_mp4_ext, get_file_size if name: child_data = frappe.db.sql( diff --git a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js index f1ffbb3..2fa08c5 100644 --- a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js +++ b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js @@ -109,8 +109,6 @@ frappe.ui.form.on('Kilometer Mapping', { message: __('Saved'), indicator: 'green' }, 5); - // frm.refresh() - // frm.refresh_fields("config_kilometer"); cur_frm.reload_doc(); } } diff --git a/smart_service/transactions/doctype/publish/publish.js b/smart_service/transactions/doctype/publish/publish.js index 6e51942..8762648 100644 --- a/smart_service/transactions/doctype/publish/publish.js +++ b/smart_service/transactions/doctype/publish/publish.js @@ -1,6 +1,5 @@ // Copyright (c) 2021, Hard n Soft Technologies Pvt Ltd and contributors // For license information, please see license.txt - cur_frm.fields_dict["variant_mapping"].get_query = function (doc, cdt, cdn) { return { query: "smart_service.transactions.doctype.publish.publish.variant", @@ -17,8 +16,55 @@ cur_frm.fields_dict["vehicle"].get_query = function (doc, cdt, cdn) { }; let lang_set_first_time = true; +var html_variant = [] +function get_variant_mapping(frm){ + var res = $(cur_frm.fields_dict.variant_mapping_html.wrapper).empty(); + var vm =` + + + + + + + + + + ` + vm += `
        #  Family Code  Fuel  Transmission  Drive
        ` + res.append(vm) + + if(frm.doc.variant_mapping_details){ + var variant_mapping_details_ = frm.doc.variant_mapping_details + variant_mapping_details_ = variant_mapping_details_.split("\n") + variant_mapping_details_.map(v=>{ + frappe.db.get_list('Variant Mapping', { + fields: ['family_code', 'fuel','transmission','drive'], + filters: { name: v } + }).then(val => { + if (val){ + var docv= ` + 1 + ${val[0]['family_code']} + ${val[0]['fuel']} + @${val[0]['transmission']} + @${val[0]['drive']} + ` + res.find('tbody').append(docv) + } + + }) + + + }) + + } + + +} frappe.ui.form.on("Publish", { + onload: function (frm) { + setTimeout(function mysam(){ $('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none') @@ -36,7 +82,7 @@ frappe.ui.form.on("Publish", { } if ( frm.doc.hasOwnProperty("feature_finder_publish_docs") && - frm.doc.feature_finder_publish_docs.length > 0 + frm.doc.feature_finder_publish_docs.length > 0 && frm.doc.docstatus == 1 ) { frm.set_df_property("feature_finder_publish_docs", "hidden", 0); cur_frm.set_df_property("feature_finder_publish_docs", "read_only", 1); @@ -210,6 +256,9 @@ frappe.ui.form.on("Publish", { } }, refresh: function (frm) { + // if(frm.doc.variant_mapping_details){ + // get_variant_mapping(frm) + // } setTimeout(function mysam(){ $('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none') $('[data-fieldname="qwik_service_publish"]').find('.grid-add-row').css('display', 'none') @@ -632,7 +681,8 @@ frappe.ui.form.on("Publish", { frm.set_value("version", r.message['data']); cur_frm.refresh_field("version"); frm.set_value("actual_published_date", frappe.datetime.nowdate()); - window.location.reload(); + // window.location.reload(); + cur_frm.reload_doc() } else{ frappe.msgprint("Failed To Publish") @@ -650,7 +700,7 @@ frappe.ui.form.on("Publish", { frm.refresh_fields(); frappe.msgprint( __( - "Publish Description: Only letters, numbers and / - () & , ' : are allowed." + // "Publish Description: Only letters, numbers and / - () & , ' : are allowed." 
) ); } @@ -682,7 +732,6 @@ frappe.ui.form.on("Publish", { } }, variant_mapping: function (frm) { - setTimeout(function mysam(){ $('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none') $('[data-fieldname="qwik_service_publish"]').find('.grid-add-row').css('display', 'none') @@ -802,51 +851,51 @@ frappe.ui.form.on("Publish", { if(frm.doc.publish_module == "Qwik Service"){ add_qwik_service(frm) } - // if (frm.doc.docstatus === 0 && frm.doc.publish_status == 'To Publish' && frm.doc.publish_module=='Feature Finder') { + if (frm.doc.docstatus === 0 && frm.doc.publish_status == 'To Publish' && frm.doc.publish_module=='Feature Finder') { - // frm.get_field('feature_finder_publish').grid.add_custom_button(__('Add to Publish'), function () { - // let added = 0; - // frm.doc.feature_finder_publish.forEach(row => { - // if (row.active_status == "Active" || row.active_status == "Inactive") { - // let dulicate = false; - // if (frm.doc.feature_finder_publish_docs) { - // frm.doc.feature_finder_publish_docs.forEach(publishrow => { - // if (row.parent1 == publishrow.parent1) { + frm.get_field('feature_finder_publish').grid.add_custom_button(__('Add to Publish'), function () { + let added = 0; + frm.doc.feature_finder_publish.forEach(row => { + if (row.active_status == "Active" || row.active_status == "Inactive") { + let dulicate = false; + if (frm.doc.feature_finder_publish_docs) { + frm.doc.feature_finder_publish_docs.forEach(publishrow => { + if (row.parent1 == publishrow.parent1) { - // dulicate = true; - // } - // }); - // } - // if (dulicate === false) { - // let child = cur_frm.add_child("feature_finder_publish_docs"); - // child.vehicle = frm.doc.vehicle; - // child.parent1 = row.parent1; - // child.language = row.language; - // child.variant = row.variant; - // child.active_status = row.active_status; - // added += 1; - // frm.set_df_property('feature_finder_publish_docs','hidden',0) - // cur_frm.refresh_field("feature_finder_publish",'read_only',1); - // refresh_field("feature_finder_publish_docs"); - // cur_frm.set_df_property("feature_finder_publish",'read_only',1); - // document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[4] .setAttribute("style","display:none;"); - // document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[5] .setAttribute("style","display:none;"); - // } - // else{ - // frappe.msgprint(__(row.parent1 + 'Alraeady Added')) - // } - // cur_frm.refresh_field("feature_finder_publish_docs",'read_only',1); - // } - // }); - // if (added) { frappe.msgprint(__(added + " Doc(s) Added")); - // cur_frm.set_df_property('feature_finder_publish','read_only',1) - // cur_frm.set_df_property("feature_finder_publish_docs",'read_only',1); - // cur_frm.refresh_field("feature_finder_publish"); } - // else if(added==0) { frappe.msgprint(__("No Doc Added")); } - // }); + dulicate = true; + } + }); + } + if (dulicate === false) { + let child = cur_frm.add_child("feature_finder_publish_docs"); + child.vehicle = frm.doc.vehicle; + child.parent1 = row.parent1; + child.language = row.language; + child.variant = row.variant; + child.active_status = row.active_status; + added += 1; + frm.set_df_property('feature_finder_publish_docs','hidden',0) + cur_frm.refresh_field("feature_finder_publish",'read_only',1); + refresh_field("feature_finder_publish_docs"); + cur_frm.set_df_property("feature_finder_publish",'read_only',1); + document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[4] 
.setAttribute("style","display:none;"); + document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[5] .setAttribute("style","display:none;"); + } + else{ + frappe.msgprint(__(row.parent1 + 'Alraeady Added')) + } + cur_frm.refresh_field("feature_finder_publish_docs",'read_only',1); + } + }); + if (added) { frappe.msgprint(__(added + " Doc(s) Added")); + cur_frm.set_df_property('feature_finder_publish','read_only',1) + cur_frm.set_df_property("feature_finder_publish_docs",'read_only',1); + cur_frm.refresh_field("feature_finder_publish"); } + else if(added==0) { frappe.msgprint(__("No Doc Added")); } + }); - // frm.fields_dict.repair_checksheet_publish.grid.grid_buttons.find('.btn-custom').removeClass('btn-default').addClass('btn-primary'); - // } + frm.fields_dict.repair_checksheet_publish.grid.grid_buttons.find('.btn-custom').removeClass('btn-default').addClass('btn-primary'); + } if (frm.doc.publish_module == "Automotive System") { let km_name = frm.doc.variant_mapping + "-" + frm.doc.language + "-KM"; frappe.db.get_value( @@ -1024,30 +1073,30 @@ frappe.ui.form.on("Publish", { refresh_field("publish_documents"); frappe.msgprint(__(frm.doc.variant_mapping + " Added")); } - // if(frm.doc.feature_finder_publish_docs && frm.doc.publish_module=='Feature Finder'){ - // frm.doc.publish_documents.forEach(publishrow => { - // if (frm.doc.variant_mapping == publishrow.variant_mapping) { - // dulicate = true; - // } - // }); - // frappe.msgprint("Variant Mapping already added"); + if(frm.doc.feature_finder_publish_docs && frm.doc.publish_module=='Feature Finder'){ + frm.doc.publish_documents.forEach(publishrow => { + if (frm.doc.variant_mapping == publishrow.variant_mapping) { + dulicate = true; + } + }); + frappe.msgprint("Variant Mapping already added"); - // } - // if (dulicate === false && frm.doc.publish_module == 'Feature Finder') { - // let child = cur_frm.add_child("feature_finder_publish_docs"); - // child.variant_mapping = frm.doc.variant_mapping; - // child.vehicle = frm.doc.vehicle; - // child.parent1 = row.parent1; - // child.language = row.language; - // child.variant = row.variant; - // child.active_status = row.active_status; - // refresh_field("feature_finder_publish_docs"); - // frappe.msgprint(__(frm.doc.variant_mapping + " Added")); - // } - // else{ - // frappe.msgprint(__(row.parent1 + 'Alraeady Added')) + } + if (dulicate === false && frm.doc.publish_module == 'Feature Finder') { + let child = cur_frm.add_child("feature_finder_publish_docs"); + child.variant_mapping = frm.doc.variant_mapping; + child.vehicle = frm.doc.vehicle; + child.parent1 = row.parent1; + child.language = row.language; + child.variant = row.variant; + child.active_status = row.active_status; + refresh_field("feature_finder_publish_docs"); + frappe.msgprint(__(frm.doc.variant_mapping + " Added")); + } + else{ + frappe.msgprint(__(row.parent1 + 'Alraeady Added')) - // } + } }, km_report: function (frm, cdt, cdn) { if (frm.doc.publish_module == "Automotive System") { diff --git a/smart_service/transactions/doctype/publish/publish.json b/smart_service/transactions/doctype/publish/publish.json index c3d9b0e..a982eca 100644 --- a/smart_service/transactions/doctype/publish/publish.json +++ b/smart_service/transactions/doctype/publish/publish.json @@ -28,6 +28,8 @@ "variant", "variant_mapping_status", "add_variant_mapping_to_publish", + "section_break_u1haz", + "variant_mapping_html", "kilometer_mapping_section", "kilometer_mapping", "km_report", @@ -387,12 +389,22 @@ "hidden": 1, "label": "Special 
Tool Publish Docs", "options": "Special Tool Publish Docs" + }, + { + "fieldname": "section_break_u1haz", + "fieldtype": "Section Break", + "hidden": 1 + }, + { + "fieldname": "variant_mapping_html", + "fieldtype": "HTML", + "label": "Variant Mapping HTML" } ], "index_web_pages_for_search": 1, "is_submittable": 1, "links": [], - "modified": "2023-10-06 11:58:47.207441", + "modified": "2023-11-27 12:34:08.109971", "modified_by": "Administrator", "module": "Transactions", "name": "Publish", diff --git a/smart_service/transactions/doctype/publish/publish.py b/smart_service/transactions/doctype/publish/publish.py index 3079ce3..5df4bc8 100644 --- a/smart_service/transactions/doctype/publish/publish.py +++ b/smart_service/transactions/doctype/publish/publish.py @@ -630,6 +630,12 @@ def create_publish_folders(folder_url): if not os.path.isdir(folder_url + "/" + "Internal"): os.makedirs(folder_url + "/" + "Internal") + + if not os.path.isdir(folder_url + "/" + "Internal_Full_Update"): + os.makedirs(folder_url + "/" + "Internal_Full_Update") + + if not os.path.isdir(folder_url + "/" + "Global_Full_Update"): + os.makedirs(folder_url + "/" + "Global_Full_Update") return True except Exception as e: @@ -774,25 +780,34 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, 'data': '' } + latest_pub_data = vehicle_data.copy() + '''Publish Ready Flags''' publish_feature_finder = 0 feature_finder_tmp = [] '''Create Folder For Publish''' create_publish_folders(folder_url) + full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update" file_path = folder_url + "/" + publish_type + "/" + \ vehicle.replace(' ', '-') + '-feature_finder' + '.json' global_file_path = folder_url + "/" + 'Global' + "/" + \ vehicle.replace(' ', '-') + '-feature_finder' + '.json' + full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + global_full_update_path = folder_url + "/" + "Global_Full_Update" + check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + '''Append Published Data to Json''' + logger.info( - f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}') + f'Feature Data Append Start::{vehicle}-{language}-{publish_type}') '''update existing global json file''' if os.path.isfile(global_file_path) and publish_type == 'Internal': - - with open(global_file_path) as f: - published_data = json.load(f) + if os.path.isfile(check_glbl_full_updte): + with open(check_glbl_full_updte) as f: + published_data = json.load(f) for i in parent: feature_finder = feature_finder_data(vehicle, language, publish_type, i.parent1, i.variant) @@ -801,7 +816,8 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, feature_finder_tmp.append( feature_finder['data'][0]) feature_finder_tmp = create_df(feature_finder_tmp) - vehi_data = compare_get_data({'data': published_data['data']}, { + latest_pub_data['data'] = feature_finder_tmp + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { 'data': feature_finder_tmp}) if vehi_data: find_distinct = set(exisitng_var) @@ -811,7 +827,14 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, vehi_data, new_variant_name, feature_finder_tmp) elif os.path.isfile(file_path) and publish_type == 'Global': - with open(global_file_path) as f: + frappe.log_error('inside') + 
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update" + #Remove Internal Full Update File On Global Publish + existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + frappe.log_error('file_path' + str(existing_internal_full_update)) + if os.path.isfile(existing_internal_full_update): + os.remove(existing_internal_full_update) + with open(check_glbl_full_updte) as f: published_data = json.load(f) for i in parent: feature_finder = feature_finder_data(vehicle, language, @@ -821,7 +844,8 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, feature_finder_tmp.append( feature_finder['data'][0]) feature_finder_tmp = create_df(feature_finder_tmp) - vehi_data = compare_get_data({'data': published_data['data']}, { + latest_pub_data['data'] = feature_finder_tmp + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { 'data': feature_finder_tmp}) if vehi_data: find_distinct = set(exisitng_var) @@ -830,27 +854,78 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, vehi_data = add_new_val( vehi_data, new_variant_name, feature_finder_tmp) else: - for i in parent: - feature_finder = feature_finder_data( - vehicle, language, publish_type, i.parent1, i.variant) - if feature_finder['status'] == 1: - publish_feature_finder = 1 - - if len(feature_finder['data']): - feature_finder_tmp.append( - feature_finder['data'][0]) - else: - frappe.throw('failed to publish') - - feature_finder_tmp = get_latest_data( - {'data': feature_finder_tmp}, {'data': []}) - - vehi_data = create_df(feature_finder_tmp) - + #Existing Global or Internal Full Update + + internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update" + check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + if os.path.isfile(check_full_update_file): + with open(check_full_update_file) as f: + published_data = json.load(f) + for i in parent: + frappe.log_error("i",str(parent)) + feature_finder = feature_finder_data( + vehicle, language, publish_type, i.parent1, i.variant) + if feature_finder['status'] == 1: + publish_feature_finder = 1 + + if len(feature_finder['data']): + feature_finder_tmp.append( + feature_finder['data'][0]) + else: + frappe.throw('failed to publish') + feature_finder_tmp = create_df(feature_finder_tmp) + latest_pub_data['data'] = feature_finder_tmp + + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { + 'data': feature_finder_tmp}) + if vehi_data: + find_distinct = set(exisitng_var) + new_variant_name = [ + x for x in new_variant if x not in find_distinct] + vehi_data = add_new_val( + vehi_data, new_variant_name, feature_finder_tmp) + + vehicle_data['data'] = vehi_data + + #First Time Global Or Internal + else: + for i in parent: + frappe.log_error("619",str(parent)) + feature_finder = feature_finder_data( + vehicle, language, publish_type, i.parent1, i.variant) + + if feature_finder['status'] == 1: + publish_feature_finder = 1 + if len(feature_finder['data'])>0: + feature_finder_tmp.append( + feature_finder['data'][0]) + else: + frappe.throw('failed to publish') + feature_finder_tmp = get_latest_data( + {'data': feature_finder_tmp}, {'data': []}) + frappe.log_error(str(feature_finder_tmp)) + vehi_data = create_df(feature_finder_tmp) + latest_pub_data['data'] = vehi_data + + if publish_type == "Global": + full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update" + 
#Remove Internal Full Update File On Global Publish + existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + frappe.log_error('file_path' + str(existing_internal_full_update)) + if os.path.isfile(existing_internal_full_update): + os.remove(existing_internal_full_update) """ Save publish file """ vehicle_data['data'] = vehi_data + with open(file_path, 'w') as outfile: + outfile.write(json.dumps(latest_pub_data, indent=4, default=str)) + + + with open(full_update_file, 'w') as outfile: outfile.write(json.dumps(vehicle_data, indent=4, default=str)) + + + logger.info( f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}') @@ -862,57 +937,68 @@ def feature_finder_publish(vehicle=None, vehicle_id=None, frappe.throw('Failed To Publish') -def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None, - language=None, publish_type=None, - release_description=None, parent=None): +def qwik_service_publish(vehicle=None, vehicle_id=None, + language=None, publish_type=None, + release_description=None, parent=None, + variant=None): try: - logger_file = f'{variant} - {language} - {publish_type} - Qwik_service' - logger = frappe.logger(logger_file, allow_site=True, file_count=100) + logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}' + logger = frappe.logger(logger_file, + allow_site=True, file_count=100) logger.info( - f'Qwik_service-{variant}-{language}-{publish_type}-{parent}') + f"start of fetching qwik_service data - {vehicle} - {language} - {module_name} - {variant}") folder_url = base_url+"/"+vehicle.replace(' ', '-') + "/"+language logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}') - '''Publish Ready Flags''' - publish_qwik_service = 0 - qwik_service_tmp = [] - qwik_service = '' - - '''Create Folder For Publish''' - create_publish_folders(folder_url) - file_path = folder_url + "/" + publish_type + "/" + \ - vehicle.replace(' ', '-') + '-qwik_service' + '.json' - global_file_path = folder_url + "/" + "Global" + "/" + \ - vehicle.replace(' ', '-') + '-qwik_service' + '.json' - exising_internal_path = folder_url + "/" + "Internal" + "/" + \ - vehicle.replace(' ', '-') + '-qwik_service' + '.json' - '''Append Published Data to Json''' - logger.info( - f'Qwik_service Data Append Start::{vehicle}-{language}-{publish_type}') + '''Final Json''' vehicle_data = { 'vehicle': vehicle, 'vehicle_myid': vehicle_id, - 'variant': variant, 'publish_type': publish_type, 'publish_description': release_description, 'publish_language': language, 'data': '' } - if os.path.isfile(global_file_path) and publish_type == 'Internal': - with open(global_file_path) as f: - published_data = json.load(f) + latest_pub_data = vehicle_data.copy() + + '''Publish Ready Flags''' + publish_qwik_service = 0 + qwik_service_tmp = [] + + '''Create Folder For Publish''' + create_publish_folders(folder_url) + full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update" + file_path = folder_url + "/" + publish_type + "/" + \ + vehicle.replace(' ', '-') + '- qwik_service' + '.json' + global_file_path = folder_url + "/" + 'Global' + "/" + \ + vehicle.replace(' ', '-') + '- qwik_service' + '.json' + + full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json' + # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + 
global_full_update_path = folder_url + "/" + "Global_Full_Update" + check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json' + + '''Append Published Data to Json''' + logger.info( + f' qwik_service Data Append Start::{vehicle}-{language}-{publish_type}') + '''update existing global json file''' + if os.path.isfile(global_file_path) and publish_type == 'Internal': + if os.path.isfile(check_glbl_full_updte): + with open(check_glbl_full_updte) as f: + published_data = json.load(f) for i in parent: - qwik_service = qwik_service_data( - language, publish_type, i.variant, i.parent1, vehicle) - if qwik_service['status'] == 1 and len(qwik_service['data']) > 0: + qwik_service = qwik_service_data(vehicle, language, + publish_type, i.parent1, i.variant) + if qwik_service['status'] == 1: publish_qwik_service = 1 qwik_service_tmp.append( qwik_service['data'][0]) qwik_service_tmp = create_df(qwik_service_tmp) - vehi_data = compare_get_data({'data': published_data['data']}, { + latest_pub_data['data'] = qwik_service_tmp + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { 'data': qwik_service_tmp}) if vehi_data: find_distinct = set(exisitng_var) @@ -921,20 +1007,26 @@ def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None, vehi_data = add_new_val( vehi_data, new_variant_name, qwik_service_tmp) - elif os.path.isfile(global_file_path) and publish_type == 'Global': - with open(global_file_path) as f: + elif os.path.isfile(file_path) and publish_type == 'Global': + frappe.log_error('inside') + full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update" + #Remove Internal Full Update File On Global Publish + existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json' + frappe.log_error('file_path' + str(existing_internal_full_update)) + if os.path.isfile(existing_internal_full_update): + os.remove(existing_internal_full_update) + with open(check_glbl_full_updte) as f: published_data = json.load(f) for i in parent: - qwik_service = qwik_service_data( - language, publish_type, i.variant, i.parent1, vehicle) - if qwik_service['status'] == 1 and len(qwik_service['data']) > 0: + qwik_service = qwik_service_data(vehicle, language, + publish_type, i.parent1, i.variant) + if qwik_service['status'] == 1: publish_qwik_service = 1 qwik_service_tmp.append( qwik_service['data'][0]) - qwik_service_tmp = create_df(qwik_service_tmp) - - vehi_data = compare_get_data({'data': published_data['data']}, { + latest_pub_data['data'] = qwik_service_tmp + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { 'data': qwik_service_tmp}) if vehi_data: find_distinct = set(exisitng_var) @@ -943,31 +1035,92 @@ def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None, vehi_data = add_new_val( vehi_data, new_variant_name, qwik_service_tmp) else: - for i in parent: - qwik_service = qwik_service_data( - language, publish_type, i.variant, i.parent1, vehicle) - - if qwik_service['status'] == 1 and len(qwik_service['data']) > 0: - publish_qwik_service = 1 - qwik_service_tmp.append( - qwik_service['data'][0]) - else: - frappe.throw('failed to publish') - - qwik_service_tmp = get_latest_data( - {'data': qwik_service_tmp}, {'data': []}) - - vehi_data = create_df(qwik_service_tmp) - + #Existing Global or Internal Full Update + + internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update" + 
check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json' + if os.path.isfile(check_full_update_file): + with open(check_full_update_file) as f: + published_data = json.load(f) + for i in parent: + qwik_service = qwik_service_data( + vehicle, language, publish_type, i.parent1, i.variant) + if qwik_service['status'] == 1: + publish_qwik_service = 1 + + + if len(qwik_service['data']): + qwik_service_tmp.append( + qwik_service['data'][0]) + else: + frappe.throw('failed to publish') + qwik_service_tmp = create_df(qwik_service_tmp) + latest_pub_data['data'] = qwik_service_tmp + + vehi_data = compare_get_data({'data': latest_pub_data['data']}, { + 'data': qwik_service_tmp}) + if vehi_data: + find_distinct = set(exisitng_var) + new_variant_name = [ + x for x in new_variant if x not in find_distinct] + vehi_data = add_new_val( + vehi_data, new_variant_name, qwik_service_tmp) + if vehi_data: + + find_distinct = set(exisitng_var) + new_variant_name = [ + x for x in new_variant if x not in find_distinct] + vehi_data = add_new_val( + vehi_data, new_variant_name, qwik_service_tmp) + vehicle_data['data'] = vehi_data + + #First Time Global Or Internal + else: + for i in parent: + frappe.log_error("211",str(i)) + qwik_service = qwik_service_data( + vehicle, language, publish_type, i.parent1, i.variant) + + if qwik_service['status'] == 1: + publish_qwik_service = 1 + if len(qwik_service['data'])>0: + qwik_service_tmp.append( + qwik_service['data'][0]) + else: + frappe.throw('failed to publish') + qwik_service_tmp = get_latest_data( + {'data': qwik_service_tmp}, {'data': []}) + frappe.log_error(str(qwik_service_tmp)) + vehi_data = create_df(qwik_service_tmp) + latest_pub_data['data'] = vehi_data + if publish_type == "Global": + full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update" + #Remove Internal Full Update File On Global Publish + existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json' + frappe.log_error('file_path' + str(existing_internal_full_update)) + if os.path.isfile(existing_internal_full_update): + os.remove(existing_internal_full_update) """ Save publish file """ vehicle_data['data'] = vehi_data + with open(file_path, 'w') as outfile: + outfile.write(json.dumps(latest_pub_data, indent=4, default=str)) + + + with open(full_update_file, 'w') as outfile: outfile.write(json.dumps(vehicle_data, indent=4, default=str)) + + + + logger.info( + f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}') - except Exception as e: - logger.error( - f'{variant} - {language} - {publish_type} - Qwik_service'+str(e)) + return 1, file_path + except Exception as e: + logger.info( + f'{vehicle} - {language} - {publish_type} - {module_name} error in json creation' + str(e)) + frappe.throw('Failed To Publish') def special_tool_publish(vehicle, vehicle_id, publish_type, release_description, @@ -987,9 +1140,9 @@ def special_tool_publish(vehicle, vehicle_id, '''Create Folder For Publish''' create_publish_folders(folder_url) - file_path = folder_url + "/" + publish_type + "/" + \ + file_path = folder_url + "/" + publish_type+"_Full_Update" + "/"+ \ vehicle.replace(' ', '-') + '-special_tool' + '.json' - existing_global = folder_url + "/" + "Global" + "/" + \ + existing_global = folder_url + "/" + "Global_Full_Update" + "/" + \ vehicle.replace(' ', '-') + '-special_tool' + '.json' '''Append Published Data to Json''' @@ -1032,17 +1185,48 
@@ def special_tool_publish(vehicle, vehicle_id, 'data': special_tool_tmp}) else: - for i in parent: - special_tool = special_tool_data( - vehicle, publish_type, i.parent1) + internal_full_update_check = folder_url + "/" +"Internal_Full_Update" + check_inter_full_update_file = internal_full_update_check +"/"+ \ + vehicle.replace(' ', '-') + '-special_tool' + '.json' - if special_tool['status'] == 1: - publish_special_tool = 1 - if len(special_tool['data']) > 0: + frappe.log_error(str(check_inter_full_update_file)) + + # check if Internal Full Update Already Exist + if os.path.isfile(check_inter_full_update_file): + frappe.log_error('inside_full_update') + with open(check_inter_full_update_file) as f: + published_data = json.load(f) + for i in parent: + + special_tool = special_tool_data( + vehicle, publish_type, i.parent1) + if special_tool['status'] == 1 and len(special_tool['data']) > 0: + publish_special_tool = 1 special_tool_tmp.append( special_tool['data'][0]) - special_tool_tmp = get_latest_data( - {'data': special_tool_tmp}, {'data': []}) + + special_tool_tmp = get_latest_data({'data': published_data['data']}, { + 'data': special_tool_tmp}) + else: + + for i in parent: + special_tool = special_tool_data( + vehicle, publish_type, i.parent1) + + if special_tool['status'] == 1: + publish_special_tool = 1 + if len(special_tool['data']) > 0: + special_tool_tmp.append( + special_tool['data'][0]) + special_tool_tmp = get_latest_data( + {'data': special_tool_tmp}, {'data': []}) + if publish_type == "Global": + #Remove Internal Full Update File On Global Publish + internal_full_update_check = folder_url + "/" +"Internal_Full_Update" + check_inter_full_update_file = internal_full_update_check +"/"+ \ + vehicle.replace(' ', '-') + '-special_tool' + '.json' + if os.path.isfile(check_inter_full_update_file): + os.remove(check_inter_full_update_file) if publish_special_tool == 1: """ Save publish file """ diff --git a/smart_service/transactions/doctype/variant_mapping/variant_mapping.py b/smart_service/transactions/doctype/variant_mapping/variant_mapping.py index f8e0a2d..ef17b18 100644 --- a/smart_service/transactions/doctype/variant_mapping/variant_mapping.py +++ b/smart_service/transactions/doctype/variant_mapping/variant_mapping.py @@ -1,60 +1,109 @@ # Copyright (c) 2021, Hard n Soft Technologies Pvt Ltd and contributors # For license information, please see license.txt +import sys import frappe from frappe.model.document import Document current_db_name = frappe.conf.get("db_name") -import sys -sys.tracebacklimit=0 +sys.tracebacklimit = 0 + + class VariantMapping(Document): - def validate(self): - - assets = frappe.db.sql("""select category,idx,active_status,attach_file from {0}.`tabVariant Mapping_Assets` where parent = '{1}'""".format(current_db_name,self.name),as_dict=True) - for x in assets: - for y in self.get("assets"): - if x.idx == y.idx and y.category == x.category: - if y.active_status != x.active_status: - y.published = "0" - frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(current_db_name,self.name,str(y.idx))) - frappe.db.commit() - if y.attach_file != x.attach_file: - y.published = "0" - frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(current_db_name,self.name,str(y.idx))) - frappe.db.commit() - - if self.is_new(): - data = frappe.db.sql("""select name from {0}.`tabVariant Mapping` + def validate(self): + + assets = 
frappe.db.sql("""select category,idx,active_status,attach_file from {0}.`tabVariant Mapping_Assets` where parent = '{1}'""".format( + current_db_name, self.name), as_dict=True) + for x in assets: + for y in self.get("assets"): + if x.idx == y.idx and y.category == x.category: + if y.active_status != x.active_status: + y.published = "0" + frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format( + current_db_name, self.name, str(y.idx))) + frappe.db.commit() + if y.attach_file != x.attach_file: + y.published = "0" + frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format( + current_db_name, self.name, str(y.idx))) + frappe.db.commit() + + if self.is_new(): + data = frappe.db.sql("""select name from {0}.`tabVariant Mapping` where variant = '{1}' and fuel = '{2}' and - transmission = '{3}' and drive = '{4}'""".format(current_db_name,self.variant,self.fuel,self.transmission,self.drive),as_dict=True) - if data: - if data != self.name: - frappe.throw('Variant Mapping already created as {0} for {1}'.format(data[0]['name'],self.variant)) - if self.assets: - for x in self.get("assets"): - if not x.language: - frappe.throw("Please select the language in row no " + str(x.idx)) - if not x.attach_file: - frappe.throw("Please select the attach file in row no " + str(x.idx)) + transmission = '{3}' and drive = '{4}'""".format(current_db_name, self.variant, self.fuel, self.transmission, self.drive), as_dict=True) + if data: + if data != self.name: + frappe.throw('Variant Mapping already created as {0} for {1}'.format( + data[0]['name'], self.variant)) + + create_module_publish(self.vehicle, self.variant) + + if self.assets: + for x in self.get("assets"): + if not x.language: + frappe.throw( + "Please select the language in row no " + str(x.idx)) + if not x.attach_file: + frappe.throw( + "Please select the attach file in row no " + str(x.idx)) + @frappe.whitelist() def variant(doctype, txt, searchfield, start, page_len, filters): - return frappe.db.sql(""" select name,variant,vehicle,vehicle_segment,active_status + return frappe.db.sql(""" select name,variant,vehicle,vehicle_segment,active_status FROM {0}.tabVariant where - (active_status like "%%%(txt)s%%" or vehicle_segment like "%%%(txt)s%%" or name like "%%%(txt)s%%" or variant like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len}) + (active_status like "%%%(txt)s%%" or vehicle_segment like "%%%(txt)s%%" or name like "%%%(txt)s%%" or variant like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len}) + @frappe.whitelist() def fuel(doctype, txt, searchfield, start, page_len, filters): - return frappe.db.sql(""" select name FROM {0}.tabFuel where - (name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len}) + return frappe.db.sql(""" select name FROM {0}.tabFuel where + (name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len}) + @frappe.whitelist() def transmission(doctype, txt, searchfield, start, page_len, filters): - return frappe.db.sql(""" select name FROM {0}.tabTransmission where - (name like "%%%(txt)s%%") order by modified desc 
limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len}) + return frappe.db.sql(""" select name FROM {0}.tabTransmission where + (name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len}) + @frappe.whitelist() def drive(doctype, txt, searchfield, start, page_len, filters): - return frappe.db.sql(""" select name FROM {0}.tabDrive where - (name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len}) + return frappe.db.sql(""" select name FROM {0}.tabDrive where + (name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len}) + + +@frappe.whitelist(allow_guest=1) +def create_module_publish(vehicle, variant): + language_list = frappe.db.sql( + ''' select lang_code from `tabCustom Languages`; ''', as_dict=1) + + if language_list: + for l in language_list: + doc = frappe.get_doc({ + 'doctype': 'Module Publish Mapping', + "vehicle": vehicle, + "variant": variant, + "language": l['lang_code'], + "publish_type": "Internal" + }) + doc.save() + doc1 = frappe.get_doc({ + 'doctype': 'Module Publish Mapping', + "vehicle": vehicle, + "variant": variant, + "language": l['lang_code'], + "publish_type": "Global" + }) + doc1.save() + frappe.log_error('Doc Global', str(doc1)) + + +def update_repair_service(vehicle): + repair_data = frappe.db.sql( + f'''select * from `tabModule Publish Mapping` where vehicle = '{vehicle}' and repairservice_check_sheet= 1;''') + + + From 8273fdc95a8af14b9def62f968ce24a311e79b91 Mon Sep 17 00:00:00 2001 From: venkata akhil Date: Thu, 30 Nov 2023 09:54:10 +0530 Subject: [PATCH 5/7] Feature finder child table data to small text field --- smart_service/phase_2/doctype/feature_finder/feature_finder.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.js b/smart_service/phase_2/doctype/feature_finder/feature_finder.js index f02455c..55bc414 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.js +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.js @@ -1106,7 +1106,7 @@ function custom_tab_html(frm) { { label: 'Content', fieldname: 'content', - fieldtype: 'Data', + fieldtype: 'Small Text', default: content_dialog_value.content, }, From 1b884ab2f77072cb123ee75c031cef3a475c8b8b Mon Sep 17 00:00:00 2001 From: venkataakhil Date: Thu, 30 Nov 2023 09:58:33 +0530 Subject: [PATCH 6/7] KM Mapping merge fix --- .../doctype/kilometer_mapping/kilometer_mapping.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py index 2ddd432..67d84a0 100644 --- a/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py +++ b/smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.py @@ -5,7 +5,6 @@ import frappe from frappe.model.document import Document import sys from frappe import _ -<<<<<<< HEAD sys.tracebacklimit = 0 @@ -32,7 +31,6 @@ def get_system(doc): return system -======= sys.tracebacklimit=0 current_db_name = frappe.conf.get("db_name") class KilometerMapping(Document): @@ -53,7 +51,6 @@ def get_system(doc): system = frappe.db.sql("""select 
sub_systems,mat from {0}.`tabSystem Mapping_Sub System` where parent = '{1}' and procedure_is_published = '0'""".format(current_db_name,s.name),as_dict=True) return system ->>>>>>> bb4600bece5c8a0a27d7e8515d386801fc23a646 @frappe.whitelist() def get_subsystem(doc): current_db_name = frappe.conf.get("db_name") @@ -86,7 +83,6 @@ def update_duplicate_km(doc, kilometer): @frappe.whitelist() -<<<<<<< HEAD def get_config_kilometer(doc, docname): res = frappe.db.sql( f"""select kilometer,mat,sub_systems,applicable,row_name from `tabKilometer Mapping_Items` where parent='{doc}'""", as_dict=1) @@ -98,7 +94,7 @@ def get_config_kilometer(doc, docname): ress.save() frappe.db.commit() return {"status": "success"} -======= + def get_config_kilometer(doc,docname): res = frappe.db.sql(f"""select kilometer,mat,sub_systems,applicable,row_name from `tabKilometer Mapping_Items` where parent='{doc}'""",as_dict=1) ress = frappe.get_doc("Kilometer Mapping",docname) @@ -123,4 +119,3 @@ def get_config_kilometer(doc,docname): # km_filters =frappe.db.sql(f"""select * from `tabKilometer Mapping_Items` where parent ="{parent}" and kilometer ="{km}" order by length(kilometer),kilometer """,as_dict = 1) # return km_filters ->>>>>>> bb4600bece5c8a0a27d7e8515d386801fc23a646 From 39ea33ca609800393574995edb26def39376e399 Mon Sep 17 00:00:00 2001 From: venkataakhil Date: Thu, 30 Nov 2023 12:26:57 +0530 Subject: [PATCH 7/7] changes in variant mapping --- smart_service/apis/v2/master.py | 184 ++++++++++-------- .../doctype/role_master/role_master.json | 9 +- .../doctype/feature_finder/feature_finder.js | 6 +- .../doctype/feature_finder/feature_finder.py | 2 +- .../transactions/doctype/publish/publish.py | 4 +- .../variant_mapping/variant_mapping.json | 3 +- 6 files changed, 120 insertions(+), 88 deletions(-) diff --git a/smart_service/apis/v2/master.py b/smart_service/apis/v2/master.py index 0c7942e..d04e9cf 100644 --- a/smart_service/apis/v2/master.py +++ b/smart_service/apis/v2/master.py @@ -19,23 +19,23 @@ date_format = "%Y-%m-%d %H:%M:%S.%f" current_db_name = frappe.conf.get("db_name") # style="""""" @@ -62,10 +62,10 @@ def get_role_mapping(): data['module'] = module_details if lsdt: role_maping_details = frappe.db.sql(f"""SELECT role,my_id,display_order,modules,active_status,modified from `tabRole Master` - where modified>='{lsdt}' order by display_order;""", as_dict=1) + where active_status = 'Active' and modified>='{lsdt}' order by display_order;""", as_dict=1) else: role_maping_details = frappe.db.sql(f"""SELECT role,my_id,display_order,modules,active_status,modified from `tabRole Master` - order by display_order;""", as_dict=1) + where active_status = 'Active' order by display_order;""", as_dict=1) data['role_mapping'] = role_maping_details return {"status": 1, "error": None, 'lsdt': utils.now(), "data": data} @@ -188,7 +188,7 @@ def new_publish(): data = [] if module == 'Feature Finder': - + flag, data = get_feature_finder(vehicle, variant, language) elif module == 'Repair/Service Check Sheet': @@ -197,98 +197,119 @@ def new_publish(): elif module == 'QWIK Service': flag, data = get_qwik_service(variant, language) CLEANR = re.compile('<.*?>') - CLEANR_back_slash = re.compile("\"") - ul_html='
          ' - li_html='
        • ' - cling_li='
        • ' - cling_last='
        ' - find_last_ul='
      ' - + CLEANR_back_slash = re.compile("\"") + ul_html = '
        ' + li_html = '
      • ' + cling_li = '
      • ' + cling_last = '
      ' + find_last_ul = '
    ' + for dt in data: - if len(dt['consumables'])>0: - + if len(dt['consumables']) > 0: + for con in dt['consumables']: if con['content']: con['content'] = re.sub(CLEANR, '', con['content']) # if len(dt['pre_work'])>0: for pw in dt['pre_work']: - reg_str = "

    " + reg_str = "

    " reg_str1 = "

    " if pw['content']: res = str(pw['content']).find('
    ') res1 = str(pw['content']).find('

    ') if res == 1: - pw['content'] = re.sub('

    ','',pw['content']) - pw['content'] = re.sub('
    ','',pw['content']) - pw['content'] = re.sub('

    ','

    ',pw['content'] ) - pw['content'] = re.sub('

    ','
    ',pw['content'] ) - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = re.sub( + '
    ', '', pw['content']) + pw['content'] = re.sub( + '
    ', '', pw['content']) + pw['content'] = re.sub( + '

    ', '

    ', pw['content']) + pw['content'] = re.sub( + '

    ', '
    ', pw['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) pw['content'] = cnverted_data else: - pw['content'] = '
    '+pw['content']+'
    ' - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = '
    ' + \ + pw['content']+'
    ' + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) frappe.log_error(str('if')+pw['content']) pw['content'] = cnverted_data if res1 == 1: - pw['content'] = re.sub('
    ','',pw['content']) - pw['content'] = re.sub('
    ','',pw['content']) - pw['content'] = re.sub('

    ','

    ',pw['content'] ) - pw['content'] = re.sub('

    ','
    ',pw['content'] ) - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = re.sub( + '
    ', '', pw['content']) + pw['content'] = re.sub( + '
    ', '', pw['content']) + pw['content'] = re.sub( + '

    ', '

    ', pw['content']) + pw['content'] = re.sub( + '

    ', '
    ', pw['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) pw['content'] = cnverted_data else: frappe.log_error(str('else')+pw['content']) - pw['content'] = '
    '+pw['content']+'
    ' - cnverted_data = re.sub(CLEANR_back_slash,"'",pw['content']) + pw['content'] = '
    ' + \ + pw['content']+'
    ' + cnverted_data = re.sub( + CLEANR_back_slash, "'", pw['content']) frappe.log_error(str('if')+pw['content']) pw['content'] = cnverted_data - if dt['qwik_procedure']: for qp in dt['qwik_procedure']['LHS']: if qp['content']: - qp['content'] = re.sub('
      ', ul_html,qp['content'] ) - qp['content'] = re.sub('
    • ',li_html,qp['content']) - clsing_li_tag = re.sub('
    • ',cling_li,qp['content']) + qp['content'] = re.sub( + '
        ', ul_html, qp['content']) + qp['content'] = re.sub( + '
      • ', li_html, qp['content']) + clsing_li_tag = re.sub( + '
      • ', cling_li, qp['content']) qp['content'] = clsing_li_tag - res = re.sub(find_last_ul,cling_last,qp['content']) + res = re.sub( + find_last_ul, cling_last, qp['content']) qp['content'] = res - - cnverted_data = re.sub(CLEANR_back_slash,"'",qp['content']) + + cnverted_data = re.sub( + CLEANR_back_slash, "'", qp['content']) qp['content'] = cnverted_data - for qp_rhs in dt['qwik_procedure']['RHS']: if qp_rhs: - qp_rhs['content'] = re.sub('
          ', ul_html,qp_rhs['content'] ) - qp_rhs['content'] = re.sub('
        • ',li_html,qp_rhs['content']) - qp_rhs['content'] = re.sub('
        • ',cling_li,qp_rhs['content']) - qp_rhs['content'] = re.sub(find_last_ul,cling_last,qp_rhs['content']) - cnverted_data = re.sub(CLEANR_back_slash,"'",qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
            ', ul_html, qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
          • ', li_html, qp_rhs['content']) + qp_rhs['content'] = re.sub( + '
          • ', cling_li, qp_rhs['content']) + qp_rhs['content'] = re.sub( + find_last_ul, cling_last, qp_rhs['content']) + cnverted_data = re.sub( + CLEANR_back_slash, "'", qp_rhs['content']) qp_rhs['content'] = cnverted_data - - elif module == 'Training Information': # flag, data = get_training_information(vehicle, language) pass elif module == 'Mahindra Special Tool Information': - CLEANR_back_slash = re.compile("\"") + CLEANR_back_slash = re.compile("\"") html_code_ul = '
              ' - html_code_li = '
            • ' + html_code_li = '
            • ' flag, data = get_special_tool_information(vehicle) for dt in data: - if len(dt['instructions'])>0: + if len(dt['instructions']) > 0: for c_t in dt['instructions']: if c_t['content_type'] == 'Description': if c_t['content']: - c_t['content'] = html_code_ul+html_code_li+c_t['content']+'
            • '+'
            ' - cnverted_data = re.sub(CLEANR_back_slash,"'",c_t['content']) + c_t['content'] = html_code_ul + \ + html_code_li+c_t['content']+''+'
          ' + cnverted_data = re.sub( + CLEANR_back_slash, "'", c_t['content']) c_t['content'] = cnverted_data - elif module == 'PDI Inspection': pass @@ -312,7 +333,7 @@ def new_publish(): return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None} -@frappe.whitelist(methods=['POST'],allow_guest = 1) +@frappe.whitelist(methods=['POST'], allow_guest=1) def new_publish1(): req = json.loads(frappe.request.data) try: @@ -323,7 +344,7 @@ def new_publish1(): language = req['language'] val = input_validation(req) - + if val != '' or val != "": return {"status": 0, "error": "Input parameter Missing: " + val} data = [] @@ -338,12 +359,17 @@ def new_publish1(): # BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \ # str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/' - FULL_UPDATE_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"{}".format(publish_type)+"_Full_Update/" + FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + '/' + \ + "{}".format(publish_type)+"_Full_Update/" + + FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \ + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' + LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \ + str(vehicle) + '/' + str(language) + '/' + "Global_Full_Update/" + LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \ + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' - FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' - LATEST_GLOBAL_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"Global_Full_Update/" - LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json' - if module == 'Feature Finder': # if publish_type == 'Internal': if os.path.isfile(FULL_UPDATE_PATH_FILE): @@ -355,7 +381,7 @@ def new_publish1(): data = data.get('data') data = data.get(variant) else: - + with open(LATEST_GLOBAL_FILE) as outfile: data = json.load(outfile) # data = data['data'] @@ -364,8 +390,6 @@ def new_publish1(): data = data.get('data') data = data.get(variant) - - elif module == 'Repair/Service Check Sheet': with open(BASE_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile: data = json.load(outfile) @@ -388,7 +412,7 @@ def new_publish1(): pass elif module == 'Mahindra Special Tool Information': - + with open(BASE_PATH + str(vehicle) + '-special_tool_information.json') as outfile: data = json.load(outfile) # data = data['data'] @@ -494,7 +518,7 @@ def filter_publish_json(src_json, filter_cond): return filtered_json[0] except Exception as e: - frappe.throw("fil",str(e)) + frappe.throw("fil", str(e)) def input_validation(req): @@ -512,10 +536,8 @@ def input_validation(req): return sts - - @frappe.whitelist() def get_published_data(): data = frappe.db.sql("""select vehicle,variant,language,publish_type from `tabPublish` where not in (select vehicle,variant,language,publish_type from - `tabModule Publish Mapping`)""",as_dict=1) - return data \ No newline at end of file + `tabModule Publish Mapping`)""", as_dict=1) + return data diff --git a/smart_service/masters/doctype/role_master/role_master.json b/smart_service/masters/doctype/role_master/role_master.json index b587e4d..3d7c8a0 100644 --- a/smart_service/masters/doctype/role_master/role_master.json +++ b/smart_service/masters/doctype/role_master/role_master.json @@ -11,6 +11,7 @@ "column_break_ffrrr", "modules", 
"skill_id", + "active_status", "section_break_bkwnr", "module_name_configuration", "module_master_html" @@ -56,11 +57,17 @@ { "fieldname": "section_break_bkwnr", "fieldtype": "Section Break" + }, + { + "fieldname": "active_status", + "fieldtype": "Select", + "label": "Active Status", + "options": "Active\nInactive" } ], "index_web_pages_for_search": 1, "links": [], - "modified": "2023-10-17 18:03:13.331281", + "modified": "2023-11-30 10:57:17.784350", "modified_by": "Administrator", "module": "Masters", "name": "Role Master", diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.js b/smart_service/phase_2/doctype/feature_finder/feature_finder.js index 8cc7c72..e97154d 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.js +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.js @@ -944,7 +944,7 @@ function custom_tab_html(frm) { { label: 'Value', fieldname: 'value', - fieldtype: 'Data', + fieldtype: 'Small Text', default: spec_dialog_value.value }, // { @@ -985,7 +985,7 @@ function custom_tab_html(frm) { }, callback: function (r) { if (r.message.status == "success") { - + frappe.msgprint("Submited data will be auto saved") cur_frm.reload_doc() cur_frm.refresh() } @@ -1077,6 +1077,7 @@ function custom_tab_html(frm) { }, callback: function (r) { if (r.message.status == "success") { + frappe.msgprint("Submited data will be auto saved") cur_frm.reload_doc() cur_frm.refresh() } @@ -1153,6 +1154,7 @@ function custom_tab_html(frm) { }, callback: function (r) { if (r.message.status == "success") { + frappe.msgprint("Submited data will be auto saved") cur_frm.reload_doc() cur_frm.refresh() } diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.py b/smart_service/phase_2/doctype/feature_finder/feature_finder.py index 94adc86..7a66d7f 100644 --- a/smart_service/phase_2/doctype/feature_finder/feature_finder.py +++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.py @@ -105,7 +105,7 @@ def get_list_data(name, val): @frappe.whitelist() -def insert_spec_data(doc_name, values, tab_ref): +def insert_spec_data(doc_name, values, tab_ref=None): try: val = json.loads(values) get_count = frappe.db.sql("""select name from `tabFeature Finder Specification` where parent='%s' and feature_type='%s'""" % ( diff --git a/smart_service/transactions/doctype/publish/publish.py b/smart_service/transactions/doctype/publish/publish.py index 5df4bc8..aeaa997 100644 --- a/smart_service/transactions/doctype/publish/publish.py +++ b/smart_service/transactions/doctype/publish/publish.py @@ -528,10 +528,10 @@ def max_publish(doc): @frappe.whitelist() def variant(doctype, txt, searchfield, start, page_len, filters): vehicle = filters.get('vehicle') - return frappe.db.sql(""" select name,family_code,fuel,transmission,vehicle_segment,active_status + return frappe.db.sql(""" select name,family_code,fuel,transmission,vehicle_segment,active_status,drive FROM {0}.`tabVariant Mapping` where vehicle = "{1}" and (active_status like "%%%(txt)s%%" or vehicle_segment like "%%%(txt)s%%" or name like "%%%(txt)s%%" or variant like "%%%(txt)s%%" or - family_code like "%%%(txt)s%%" or fuel like "%%%(txt)s%%" or transmission like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name, vehicle) % {'txt': txt, 'start': start, 'page_len': page_len}) + family_code like "%%%(txt)s%%" or fuel like "%%%(txt)s%%" or transmission like "%%%(txt)s%%" or drive like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s 
""".format(current_db_name, vehicle) % {'txt': txt, 'start': start, 'page_len': page_len}) @frappe.whitelist() diff --git a/smart_service/transactions/doctype/variant_mapping/variant_mapping.json b/smart_service/transactions/doctype/variant_mapping/variant_mapping.json index 5d270cc..5eda3c5 100644 --- a/smart_service/transactions/doctype/variant_mapping/variant_mapping.json +++ b/smart_service/transactions/doctype/variant_mapping/variant_mapping.json @@ -126,6 +126,7 @@ { "fieldname": "drive", "fieldtype": "Link", + "in_preview": 1, "in_standard_filter": 1, "label": "Drive", "options": "Drive", @@ -193,7 +194,7 @@ ], "index_web_pages_for_search": 1, "links": [], - "modified": "2023-08-03 18:15:04.709396", + "modified": "2023-11-30 10:25:25.713843", "modified_by": "Administrator", "module": "Transactions", "name": "Variant Mapping",