Browse Source

publish changes

version2
venkataakhil 12 months ago
parent
commit
ab2fb4aa3e
  1. 5
      smart_service/apis/app_user_login.py
  2. 98
      smart_service/apis/v2/master.py
  3. 137
      smart_service/apis/v2/phase2_test_publish.py
  4. 13
      smart_service/phase_2/doctype/feature_finder/feature_finder.js
  5. 2
      smart_service/phase_2/doctype/feature_finder/feature_finder.json
  6. 17
      smart_service/phase_2/doctype/feature_finder/feature_finder.py
  7. 2
      smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py
  8. 6
      smart_service/phase_2/doctype/special_tool_information/special_tool_information.py
  9. 0
      smart_service/phase_2/doctype/torque_category/__init__.py
  10. 8
      smart_service/phase_2/doctype/torque_category/test_torque_category.py
  11. 8
      smart_service/phase_2/doctype/torque_category/torque_category.js
  12. 41
      smart_service/phase_2/doctype/torque_category/torque_category.json
  13. 8
      smart_service/phase_2/doctype/torque_category/torque_category.py
  14. 0
      smart_service/phase_2/doctype/torque_details/__init__.py
  15. 58
      smart_service/phase_2/doctype/torque_details/torque_details.json
  16. 8
      smart_service/phase_2/doctype/torque_details/torque_details.py
  17. 0
      smart_service/phase_2/doctype/torque_master/__init__.py
  18. 8
      smart_service/phase_2/doctype/torque_master/test_torque_master.py
  19. 8
      smart_service/phase_2/doctype/torque_master/torque_master.js
  20. 68
      smart_service/phase_2/doctype/torque_master/torque_master.json
  21. 8
      smart_service/phase_2/doctype/torque_master/torque_master.py
  22. 0
      smart_service/phase_2/doctype/torque_subcategory/__init__.py
  23. 8
      smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py
  24. 8
      smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js
  25. 41
      smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json
  26. 8
      smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py
  27. 2
      smart_service/phase_2/doctype/training_information/training_information.py
  28. 2
      smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js
  29. 193
      smart_service/transactions/doctype/publish/publish.js
  30. 14
      smart_service/transactions/doctype/publish/publish.json
  31. 362
      smart_service/transactions/doctype/publish/publish.py
  32. 123
      smart_service/transactions/doctype/variant_mapping/variant_mapping.py

5
smart_service/apis/app_user_login.py

@ -812,11 +812,14 @@ def input_validation(**kwargs):
sts = ""
i = 0
for key, value in kwargs.items():
if value is None or value == "":
# frappe.log_error("key"+str(key))
# frappe.log_error("value"+str(value))
if value is None or value == "" or value == '':
if i != 0:
sts += ','
sts += str(key) + ""
i += 1
# frappe.log_error(sts)
return sts

98
smart_service/apis/v2/master.py

@ -6,7 +6,8 @@ from frappe import utils
import json
import html
import os
from smart_service.apis.app_user_login import input_validation
from frappe.utils import cstr
site_name = cstr(frappe.local.site)
@ -311,7 +312,7 @@ def new_publish():
return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None}
@frappe.whitelist(methods=['POST'])
@frappe.whitelist(methods=['POST'],allow_guest = 1)
def new_publish1():
req = json.loads(frappe.request.data)
try:
@ -321,6 +322,10 @@ def new_publish1():
variant = req['variant']
language = req['language']
val = input_validation(req)
if val != '' or val != "":
return {"status": 0, "error": "Input parameter Missing: " + val}
data = []
# Get Publish Type
publish_type = frappe.db.get_list(
@ -328,40 +333,69 @@ def new_publish1():
if len(publish_type) > 0:
publish_type = publish_type[0]['publish_type']
else:
return {"status": 0, "error": "Publish Type Not Set For: " + iid}
BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \
str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/'
# BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \
# str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/'
FULL_UPDATE_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"{}".format(publish_type)+"_Full_Update/"
FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
LATEST_GLOBAL_PATH = BASE_URL+ "/files/json_files/phase2/" + str(vehicle) + '/' + str(language) + '/' +"Global_Full_Update/"
LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
if module == 'Feature Finder':
with open(BASE_PATH + str(vehicle) + '-feature_finder.json') as outfile:
data = json.load(outfile)
data = data['data']
filter_json = filter_publish_json(data, variant)
data = filter_json
# if publish_type == 'Internal':
if os.path.isfile(FULL_UPDATE_PATH_FILE):
with open(FULL_UPDATE_PATH_FILE) as outfile:
data = json.load(outfile)
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
data = data.get('data')
data = data.get(variant)
else:
with open(LATEST_GLOBAL_FILE) as outfile:
data = json.load(outfile)
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
data = data.get('data')
data = data.get(variant)
elif module == 'Repair/Service Check Sheet':
with open(BASE_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile:
data = json.load(outfile)
data = data['data']
filter_json = filter_publish_json(data, variant)
data = filter_json
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
data = data.get('data')
data = data.get(variant)
elif module == 'QWIK Service':
with open(BASE_PATH + str(vehicle) + '-qwik_service.json') as outfile:
data = json.load(outfile)
data = data['data']
filter_json = filter_publish_json(data, variant)
data = filter_json
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
data = data.get('data')
data = data.get(variant)
elif module == 'Training Information':
pass
elif module == 'Mahindra Special Tool Information':
with open(BASE_PATH + str(vehicle) + '-special_tool_information.json') as outfile:
data = json.load(outfile)
data = data['data']
filter_json = filter_publish_json(data, variant)
data = filter_json
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
data = data.get('data')
data = data.get(variant)
elif module == 'PDI Inspection':
pass
@ -376,7 +410,7 @@ def new_publish1():
except Exception as e:
frappe.local.response['http_status_code'] = 400
return {'status': 0, 'error': "Parameter's missing: " + str(e), 'data': None}
return {'status': 0, 'error': str(e), 'data': None}
@frappe.whitelist(methods=['POST'], allow_guest=1)
@ -460,4 +494,28 @@ def filter_publish_json(src_json, filter_cond):
return filtered_json[0]
except Exception as e:
frappe.throw(str(e))
frappe.throw("fil",str(e))
def input_validation(req):
    """Return a comma-separated list of the keys in *req* whose values are missing.

    A value counts as missing when it is ``None`` or the empty string; other
    falsy values (``0``, ``False``, ``[]``) are considered present, matching
    the original behavior.

    Args:
        req: Mapping of request-parameter names to values.

    Returns:
        str: ``"key1,key2"`` style listing of missing keys, or ``""`` when
        every parameter has a value.
    """
    # Fixes over the previous version:
    #  * dropped the per-key frappe.log_error debug calls, which spammed the
    #    Error Log and wrote raw request values (potentially sensitive) to it;
    #  * removed the redundant `value == ''` test (identical to `value == ""`)
    #    and the no-op `+ ""` concatenation;
    #  * replaced manual index/comma bookkeeping with a join.
    missing = [str(key) for key, value in req.items()
               if value is None or value == ""]
    return ",".join(missing)
@frappe.whitelist()
def get_published_data():
    """Return Publish rows that have no matching Module Publish Mapping entry.

    Returns:
        list[dict]: rows with ``vehicle``, ``variant``, ``language`` and
        ``publish_type`` keys for every `tabPublish` record whose
        (vehicle, variant, language, publish_type) tuple is absent from
        `tabModule Publish Mapping`.
    """
    # Bug fix: the original query read `where not in (select ...)`, which is
    # invalid SQL — MySQL/MariaDB require the row constructor on the left-hand
    # side of NOT IN.
    data = frappe.db.sql(
        """select vehicle, variant, language, publish_type
           from `tabPublish`
           where (vehicle, variant, language, publish_type) not in
                 (select vehicle, variant, language, publish_type
                  from `tabModule Publish Mapping`)""",
        as_dict=1)
    return data

137
smart_service/apis/v2/phase2_test_publish.py

@ -0,0 +1,137 @@
import frappe
import re
from urllib import request
import datetime
from frappe import utils
import json
import html
import os
from frappe.utils import logger
from frappe.utils import cstr
frappe.utils.logger.set_log_level("DEBUG")
site_name = cstr(frappe.local.site)
BASE_URL = os.path.expanduser(
"~") + "/frappe-bench/sites/" + site_name + "/public"
date_format = "%Y-%m-%d %H:%M:%S.%f"
current_db_name = frappe.conf.get("db_name")
@frappe.whitelist(allow_guest = 1,methods = ['POST'])
def repair_checksheet_publish(vehicle = None, language = None
        , publish_type = None , parent = None):
    # Build and write the repair-checksheet publish JSON for one
    # vehicle/language/publish_type, merging into an existing file when one
    # is already on disk. Returns (1, file_path) on success; throws otherwise.
    #
    # NOTE(review): this endpoint is allow_guest=1 — confirm it is safe to
    # expose without authentication.
    try:
        # NOTE(review): `req` is parsed but never used afterwards.
        req = json.loads(frappe.request.data)
        logger_file = f'{vehicle} - {language} - {publish_type} - repair checksheet'
        logger = frappe.logger(logger_file, allow_site=True, file_count=100)
        logger.info(
            f'Repair Checksheet Started-{vehicle}-{language}-{publish_type}-{parent}')
        # NOTE(review): `base_url` is not defined anywhere in this module —
        # the module-level constant is `BASE_URL`. As written this raises
        # NameError at runtime; confirm and correct.
        folder_url = base_url+"/"+vehicle.replace(' ', '-')+"-TEST" + "/"+language
        logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}')
        '''Publish Ready Flags'''
        publish_repair_checksheet = 0
        repair_checksheet_tmp = []
        '''Create Folder For Publish'''
        # NOTE(review): create_publish_folders is not defined/imported in this
        # file as shown — presumably lives in a sibling module; verify import.
        create_publish_folders(folder_url)
        file_path = folder_url + "/" + publish_type + "/" + \
            vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json'
        # NOTE(review): full_update_path is computed but never used below.
        full_update_path = folder_url + "/" + publish_type + "/" + \
            vehicle.replace(' ', '-') + '-repair_check_sheet_full_update' + '.json'
        '''Append Published Data to Json'''
        logger.info(
            f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}')
        vehicle_data = {
            'vehicle': vehicle,
            # NOTE(review): vehicle_id and release_description are not defined
            # in this function or module as shown — NameError at runtime.
            'vehicle_myid': vehicle_id,
            'publish_type': publish_type,
            'publish_description': release_description,
            'publish_language': language,
            'data': ''
        }
        '''update existing global json file'''
        if os.path.isfile(file_path):
            with open(file_path) as f:
                published_data = json.load(f)
            # NOTE(review): here `parent` is iterated directly (rows with a
            # .parent_name attribute), while the else-branch below iterates
            # parent.repiar_checksheet_publish_docs — confirm the expected
            # shape of `parent` for each call path.
            for i in parent:
                repair_checksheet = repair_checksheet_data(vehicle, language,
                                                           publish_type, i.parent_name)
                if repair_checksheet['status'] == 1:
                    publish_repair_checksheet = 1
                    repair_checksheet_tmp.append(
                        repair_checksheet['data'][0])
            # Merge freshly fetched rows with the previously published data.
            repair_checksheet_tmp = get_latest_data({'data': published_data['data']}, {
                'data': repair_checksheet_tmp})
        else:
            # First publish for this path: collect data only (note the
            # misspelled attribute name 'repiar_...' — matches the doctype
            # fieldname presumably; verify).
            for i in parent.repiar_checksheet_publish_docs:
                repair_checksheet = repair_checksheet_data(
                    vehicle, language, publish_type, i.parent_name)
                if repair_checksheet['status'] == 1:
                    publish_repair_checksheet = 1
                    if len(repair_checksheet['data']):
                        repair_checksheet_tmp.append(
                            repair_checksheet['data'][0])
            repair_checksheet_tmp = get_latest_data(
                {'data': repair_checksheet_tmp}, {'data': []})
        if publish_repair_checksheet == 1:
            """ Save publish file """
            vehicle_data['data'] = repair_checksheet_tmp
            with open(file_path, 'w') as outfile:
                outfile.write(json.dumps(vehicle_data, indent=4, default=str))
        # NOTE(review): returns (1, file_path) even when nothing was published
        # (publish_repair_checksheet == 0); confirm that is intended.
        return 1, file_path
    except Exception as e:
        # NOTE(review): if the exception is raised before `logger` is bound
        # (e.g. by json.loads above), this handler itself raises NameError.
        logger.info(
            f'{vehicle} - {language} - {publish_type} error in json creation' + str(e))
        frappe.throw('Failed To Publish')
def repair_checksheet_data(vehicle=None, language=None,
                           publish_type=None, parent=None):
    # Fetch one Repair Service Mapping record (plus its complaint child rows)
    # for the given vehicle/language and return it wrapped in the module's
    # success/failure response dicts.
    try:
        logger_file = f'{vehicle} - {language} - {publish_type} - repair_checksheet_data'
        logger = frappe.logger(logger_file,
                               allow_site=True, file_count=100)
        logger.info(
            f"start of fetching repair checksheet data - {vehicle} - {language}")
        # NOTE(review): vehicle/language/parent are interpolated directly into
        # the SQL string — SQL-injection risk on a whitelisted path; should use
        # frappe.db.sql parameter binding (%(name)s) instead.
        repair_service_details = frappe.db.sql(f"""select vehicle,vehicle_id,
                    name,language,check_list_name,
                    keywords,
                    active_status,display_order,my_id
                    from `tabRepair Service Mapping`
                    where vehicle = '{vehicle}'
                    and language = '{language}'
                    and name = '{parent}'""", as_dict=1)
        # Attach the child complaint rows to each parent record.
        for d in repair_service_details:
            d['complaint'] = frappe.db.sql(f"""select complaint,remedial_action_ok,
                remedial_action_not_ok,idx as display_order from `tabRepair Service Child`
                where parent = '{d['name']}'
                order by display_order ;""", as_dict=1)
        logger.info(
            f"end of fetching repair checksheet data {vehicle} - {language}")
        # NOTE(review): success_reponse / failure_reponse (sic) are not defined
        # in this module as shown — presumably shared dicts imported elsewhere;
        # mutating a shared module-level dict here is also not thread/request
        # safe. Verify their origin.
        success_reponse['data'] = repair_service_details
        success_reponse[
            'message'] = f'Repair Checksheet Fecthed Succesfully for {vehicle} - {language} - repair_checksheet_data '
        return success_reponse
    except Exception as e:
        failure_reponse['error'] = f"{vehicle} - {language} has following error - " + str(
            e)
        # NOTE(review): if the failure happened before `logger` was bound,
        # this line raises NameError and masks the original exception.
        logger.error('error in repair checksheet' + str(e))
        return failure_reponse

13
smart_service/phase_2/doctype/feature_finder/feature_finder.js

@ -798,10 +798,12 @@ function custom_tab_html(frm) {
frappe.call({
method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_spec_data",
args: {
values: arr
values: arr,
doc_name:frm.doc.name
},
callback: function (r) {
if (r.message.status == "success") {
frm.set_value('is_published', '0')
cur_frm.reload_doc()
}
}
@ -823,10 +825,12 @@ function custom_tab_html(frm) {
frappe.call({
method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_image_data",
args: {
values: arr
values: arr,
doc_name:frm.doc.name
},
callback: function (r) {
if (r.message.status == "success") {
frm.set_value('is_published', '0')
cur_frm.reload_doc()
}
}
@ -848,7 +852,8 @@ function custom_tab_html(frm) {
frappe.call({
method: "smart_service.phase_2.doctype.feature_finder.feature_finder.delete_content_data",
args: {
values: arr
values: arr,
doc_name:frm.doc.name
},
callback: function (r) {
if (r.message.status == "success") {
@ -1106,7 +1111,7 @@ function custom_tab_html(frm) {
{
label: 'Content',
fieldname: 'content',
fieldtype: 'Data',
fieldtype: 'Small Text',
default: content_dialog_value.content,
},

2
smart_service/phase_2/doctype/feature_finder/feature_finder.json

@ -209,7 +209,7 @@
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-10-06 11:11:09.968138",
"modified": "2023-11-24 16:28:33.427222",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Feature Finder",

17
smart_service/phase_2/doctype/feature_finder/feature_finder.py

@ -138,13 +138,16 @@ def insert_spec_data(doc_name, values, tab_ref):
@frappe.whitelist()
def delete_spec_data(values):
def delete_spec_data(values,doc_name):
try:
val = json.loads(values)
if len(val) > 0:
for d in val:
frappe.delete_doc("Feature Finder Specification", d)
frappe.db.commit()
if doc_name:
frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name))
frappe.db.commit()
return {"status": "success"}
except Exception as e:
frappe.log_error("delete_spec_data", str(e))
@ -184,13 +187,16 @@ def insert_image_data(doc_name, values, tab_ref):
@frappe.whitelist()
def delete_image_data(values):
def delete_image_data(values,doc_name):
try:
val = json.loads(values)
if len(val) > 0:
for d in val:
frappe.delete_doc("Feature Finder Images", d)
frappe.db.commit()
if doc_name:
frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name))
frappe.db.commit()
return {"status": "success"}
except Exception as e:
frappe.log_error("delete_image_data", str(e))
@ -230,13 +236,16 @@ def insert_content_data(doc_name, values, tab_ref):
@frappe.whitelist()
def delete_content_data(values):
def delete_content_data(values,doc_name):
try:
val = json.loads(values)
if len(val) > 0:
for d in val:
frappe.delete_doc("Feature Finder Content", d)
frappe.db.commit()
if doc_name:
frappe.db.sql("""UPDATE `tabFeature Finder` set is_published='0' WHERE name ='%s'""" % (doc_name))
frappe.db.commit()
return {"status": "success"}
except Exception as e:
frappe.log_error("delete_image_data", str(e))
@ -294,7 +303,7 @@ def insert_tab_ref(docname, tab_ref):
@frappe.whitelist()
def file_validation(image):
def file_validation(image=None):
from smart_service.apis.utils import check_png_ext, check_img_ext,details_of_image
if image:
res = check_img_ext(image)

2
smart_service/phase_2/doctype/feature_finder_master/feature_finder_master.py

@ -9,7 +9,7 @@ class FeatureFinderMaster(Document):
@frappe.whitelist()
def file_validation(image, name, value=None):
def file_validation(image =None, name=None, value=None):
from smart_service.apis.utils import check_png_ext, check_img_ext,check_pdf_ext,details_of_image
if image:
res = check_pdf_ext(image)

6
smart_service/phase_2/doctype/special_tool_information/special_tool_information.py

@ -39,7 +39,7 @@ class SpecialToolInformation(Document):
@frappe.whitelist()
def file_validation(image, name):
def file_validation(image=None, name=None):
from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image
if image:
res = check_img_ext(image)
@ -61,7 +61,7 @@ def file_validation(image, name):
return ret, 1
@frappe.whitelist()
def file_validation1(image, name):
def file_validation1(image=None, name=None):
from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image
if image:
res = check_img_ext(image)
@ -108,7 +108,7 @@ def file_validation3(video=None):
@frappe.whitelist()
def file_validation_child(name, value=None):
def file_validation_child(name=None, value=None):
from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image, check_mp4_ext, get_file_size
if name:
child_data = frappe.db.sql(

0
smart_service/phase_2/doctype/torque_category/__init__.py

8
smart_service/phase_2/doctype/torque_category/test_torque_category.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors
# See license.txt
# import frappe
import unittest
class TestTorqueCategory(unittest.TestCase):
    """Auto-generated placeholder test case for the Torque Category doctype; no tests yet."""
    pass

8
smart_service/phase_2/doctype/torque_category/torque_category.js

@ -0,0 +1,8 @@
// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
// For license information, please see license.txt
// Client-side controller for the Torque Category doctype.
// Auto-generated stub: no custom form behavior is defined yet.
frappe.ui.form.on('Torque Category', {
	// refresh: function(frm) {
	// }
});

41
smart_service/phase_2/doctype/torque_category/torque_category.json

@ -0,0 +1,41 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-11-24 17:09:08.707664",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"category"
],
"fields": [
{
"fieldname": "category",
"fieldtype": "Data",
"label": "Category"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-11-24 17:09:08.707664",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Category",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC"
}

8
smart_service/phase_2/doctype/torque_category/torque_category.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TorqueCategory(Document):
    """Server-side controller for the Torque Category doctype; no custom behavior."""
    pass

0
smart_service/phase_2/doctype/torque_details/__init__.py

58
smart_service/phase_2/doctype/torque_details/torque_details.json

@ -0,0 +1,58 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-11-24 17:09:41.181853",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"fastener_description",
"specification",
"qty",
"torque_nm",
"remark"
],
"fields": [
{
"fieldname": "fastener_description",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Fastener Description"
},
{
"fieldname": "specification",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Specification"
},
{
"fieldname": "qty",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Qty"
},
{
"fieldname": "torque_nm",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Torque NM"
},
{
"fieldname": "remark",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Remark"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2023-11-24 17:11:33.158796",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Details",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC"
}

8
smart_service/phase_2/doctype/torque_details/torque_details.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TorqueDetails(Document):
    """Server-side controller for the Torque Details child table; no custom behavior."""
    pass

0
smart_service/phase_2/doctype/torque_master/__init__.py

8
smart_service/phase_2/doctype/torque_master/test_torque_master.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors
# See license.txt
# import frappe
import unittest
class TestTorqueMaster(unittest.TestCase):
    """Auto-generated placeholder test case for the Torque Master doctype; no tests yet."""
    pass

8
smart_service/phase_2/doctype/torque_master/torque_master.js

@ -0,0 +1,8 @@
// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
// For license information, please see license.txt
// Client-side controller for the Torque Master doctype.
// Auto-generated stub: no custom form behavior is defined yet.
frappe.ui.form.on('Torque Master', {
	// refresh: function(frm) {
	// }
});

68
smart_service/phase_2/doctype/torque_master/torque_master.json

@ -0,0 +1,68 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-11-24 17:10:52.598215",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"category",
"column_break_gkwxt",
"sub_category",
"section_break_cmyhs",
"torque_table"
],
"fields": [
{
"fieldname": "category",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Category",
"options": "Torque Category"
},
{
"fieldname": "sub_category",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Sub Category",
"options": "Torque Subcategory"
},
{
"fieldname": "torque_table",
"fieldtype": "Table",
"label": "Torque Table",
"options": "Torque Details"
},
{
"fieldname": "column_break_gkwxt",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_cmyhs",
"fieldtype": "Section Break"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-11-24 17:12:07.675053",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Master",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC"
}

8
smart_service/phase_2/doctype/torque_master/torque_master.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TorqueMaster(Document):
    """Server-side controller for the Torque Master doctype; no custom behavior."""
    pass

0
smart_service/phase_2/doctype/torque_subcategory/__init__.py

8
smart_service/phase_2/doctype/torque_subcategory/test_torque_subcategory.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors
# See license.txt
# import frappe
import unittest
class TestTorqueSubcategory(unittest.TestCase):
    """Auto-generated placeholder test case for the Torque Subcategory doctype; no tests yet."""
    pass

8
smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.js

@ -0,0 +1,8 @@
// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
// For license information, please see license.txt
// Client-side controller for the Torque Subcategory doctype.
// Auto-generated stub: no custom form behavior is defined yet.
frappe.ui.form.on('Torque Subcategory', {
	// refresh: function(frm) {
	// }
});

41
smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.json

@ -0,0 +1,41 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-11-24 17:09:21.527639",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"sub_category"
],
"fields": [
{
"fieldname": "sub_category",
"fieldtype": "Data",
"label": "Sub Category"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-11-24 17:09:21.527639",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Subcategory",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC"
}

8
smart_service/phase_2/doctype/torque_subcategory/torque_subcategory.py

@ -0,0 +1,8 @@
# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TorqueSubcategory(Document):
    """Server-side controller for the Torque Subcategory doctype; no custom behavior."""
    pass

2
smart_service/phase_2/doctype/training_information/training_information.py

@ -21,7 +21,7 @@ class TrainingInformation(Document):
@frappe.whitelist()
def file_validation_child(name, value=None):
def file_validation_child(name=None, value=None):
from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image, check_mp4_ext, get_file_size
if name:
child_data = frappe.db.sql(

2
smart_service/transactions/doctype/kilometer_mapping/kilometer_mapping.js

@ -109,8 +109,6 @@ frappe.ui.form.on('Kilometer Mapping', {
message: __('Saved'),
indicator: 'green'
}, 5);
// frm.refresh()
// frm.refresh_fields("config_kilometer");
cur_frm.reload_doc();
}
}

193
smart_service/transactions/doctype/publish/publish.js

@ -1,6 +1,5 @@
// Copyright (c) 2021, Hard n Soft Technologies Pvt Ltd and contributors
// For license information, please see license.txt
cur_frm.fields_dict["variant_mapping"].get_query = function (doc, cdt, cdn) {
return {
query: "smart_service.transactions.doctype.publish.publish.variant",
@ -17,9 +16,56 @@ cur_frm.fields_dict["vehicle"].get_query = function (doc, cdt, cdn) {
};
let lang_set_first_time = true;
var html_variant = []
function get_variant_mapping(frm) {
    // Render the document's variant mappings (one name per line in
    // frm.doc.variant_mapping_details) as a Bootstrap table inside the
    // variant_mapping_html wrapper field.
    //
    // Fixes over the previous version:
    //  * stray "@" characters prefixed the Transmission and Drive cells;
    //  * the row number was hard-coded to 1 for every row — now uses the
    //    mapping's position in the list;
    //  * `if (val)` was always truthy for an array — now checks val.length
    //    before indexing val[0].
    var res = $(cur_frm.fields_dict.variant_mapping_html.wrapper).empty();
    var vm = `<table class="table table-bordered">
        <thead>
            <tr>
                <th scope="col">#</th>
                <th scope="col">Family Code</th>
                <th scope="col">Fuel</th>
                <th scope="col">Transmission</th>
                <th scope="col">Drive</th>
            </tr>
        </thead>
    `
    vm += ` <tbody></tbody></table>`
    res.append(vm)
    if (frm.doc.variant_mapping_details) {
        var mapping_names = frm.doc.variant_mapping_details.split("\n")
        mapping_names.map((v, idx) => {
            frappe.db.get_list('Variant Mapping', {
                fields: ['family_code', 'fuel', 'transmission', 'drive'],
                filters: { name: v }
            }).then(val => {
                if (val && val.length) {
                    var row = ` <tr>
                        <th scope="row">${idx + 1}</th>
                        <td>${val[0]['family_code']}</td>
                        <td>${val[0]['fuel']}</td>
                        <td>${val[0]['transmission']}</td>
                        <td>${val[0]['drive']}</td>
                    </tr>`
                    res.find('tbody').append(row)
                }
            })
        })
    }
}
frappe.ui.form.on("Publish", {
onload: function (frm) {
setTimeout(function mysam(){
$('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none')
$('[data-fieldname="qwik_service_publish"]').find('.grid-add-row').css('display', 'none')
@ -36,7 +82,7 @@ frappe.ui.form.on("Publish", {
}
if (
frm.doc.hasOwnProperty("feature_finder_publish_docs") &&
frm.doc.feature_finder_publish_docs.length > 0
frm.doc.feature_finder_publish_docs.length > 0 && frm.doc.docstatus == 1
) {
frm.set_df_property("feature_finder_publish_docs", "hidden", 0);
cur_frm.set_df_property("feature_finder_publish_docs", "read_only", 1);
@ -210,6 +256,9 @@ frappe.ui.form.on("Publish", {
}
},
refresh: function (frm) {
// if(frm.doc.variant_mapping_details){
// get_variant_mapping(frm)
// }
setTimeout(function mysam(){
$('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none')
$('[data-fieldname="qwik_service_publish"]').find('.grid-add-row').css('display', 'none')
@ -632,7 +681,8 @@ frappe.ui.form.on("Publish", {
frm.set_value("version", r.message['data']);
cur_frm.refresh_field("version");
frm.set_value("actual_published_date", frappe.datetime.nowdate());
window.location.reload();
// window.location.reload();
cur_frm.reload_doc()
}
else{
frappe.msgprint("Failed To Publish")
@ -650,7 +700,7 @@ frappe.ui.form.on("Publish", {
frm.refresh_fields();
frappe.msgprint(
__(
"<b>Publish Description:</b> Only letters, numbers and <b> / - () & , ' : </b> are allowed."
// "<b>Publish Description:</b> Only letters, numbers and <b> / - () & , ' : </b> are allowed."
)
);
}
@ -682,7 +732,6 @@ frappe.ui.form.on("Publish", {
}
},
variant_mapping: function (frm) {
setTimeout(function mysam(){
$('[data-fieldname="special_tool_publish"]').find('.grid-add-row').css('display', 'none')
$('[data-fieldname="qwik_service_publish"]').find('.grid-add-row').css('display', 'none')
@ -802,51 +851,51 @@ frappe.ui.form.on("Publish", {
if(frm.doc.publish_module == "Qwik Service"){
add_qwik_service(frm)
}
// if (frm.doc.docstatus === 0 && frm.doc.publish_status == 'To Publish' && frm.doc.publish_module=='Feature Finder') {
// frm.get_field('feature_finder_publish').grid.add_custom_button(__('Add to Publish'), function () {
// let added = 0;
// frm.doc.feature_finder_publish.forEach(row => {
// if (row.active_status == "Active" || row.active_status == "Inactive") {
// let dulicate = false;
// if (frm.doc.feature_finder_publish_docs) {
// frm.doc.feature_finder_publish_docs.forEach(publishrow => {
// if (row.parent1 == publishrow.parent1) {
// dulicate = true;
// }
// });
// }
// if (dulicate === false) {
// let child = cur_frm.add_child("feature_finder_publish_docs");
// child.vehicle = frm.doc.vehicle;
// child.parent1 = row.parent1;
// child.language = row.language;
// child.variant = row.variant;
// child.active_status = row.active_status;
// added += 1;
// frm.set_df_property('feature_finder_publish_docs','hidden',0)
// cur_frm.refresh_field("feature_finder_publish",'read_only',1);
// refresh_field("feature_finder_publish_docs");
// cur_frm.set_df_property("feature_finder_publish",'read_only',1);
// document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[4] .setAttribute("style","display:none;");
// document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[5] .setAttribute("style","display:none;");
// }
// else{
// frappe.msgprint(__(row.parent1 + 'Alraeady Added'))
// }
// cur_frm.refresh_field("feature_finder_publish_docs",'read_only',1);
// }
// });
// if (added) { frappe.msgprint(__(added + " Doc(s) Added"));
// cur_frm.set_df_property('feature_finder_publish','read_only',1)
// cur_frm.set_df_property("feature_finder_publish_docs",'read_only',1);
// cur_frm.refresh_field("feature_finder_publish"); }
// else if(added==0) { frappe.msgprint(__("No Doc Added")); }
// });
// frm.fields_dict.repair_checksheet_publish.grid.grid_buttons.find('.btn-custom').removeClass('btn-default').addClass('btn-primary');
// }
if (frm.doc.docstatus === 0 && frm.doc.publish_status == 'To Publish' && frm.doc.publish_module=='Feature Finder') {
frm.get_field('feature_finder_publish').grid.add_custom_button(__('Add to Publish'), function () {
let added = 0;
frm.doc.feature_finder_publish.forEach(row => {
if (row.active_status == "Active" || row.active_status == "Inactive") {
let dulicate = false;
if (frm.doc.feature_finder_publish_docs) {
frm.doc.feature_finder_publish_docs.forEach(publishrow => {
if (row.parent1 == publishrow.parent1) {
dulicate = true;
}
});
}
if (dulicate === false) {
let child = cur_frm.add_child("feature_finder_publish_docs");
child.vehicle = frm.doc.vehicle;
child.parent1 = row.parent1;
child.language = row.language;
child.variant = row.variant;
child.active_status = row.active_status;
added += 1;
frm.set_df_property('feature_finder_publish_docs','hidden',0)
cur_frm.refresh_field("feature_finder_publish",'read_only',1);
refresh_field("feature_finder_publish_docs");
cur_frm.set_df_property("feature_finder_publish",'read_only',1);
document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[4] .setAttribute("style","display:none;");
document.getElementsByClassName('btn btn-xs btn-secondary grid-add-row')[5] .setAttribute("style","display:none;");
}
else{
frappe.msgprint(__(row.parent1 + 'Alraeady Added'))
}
cur_frm.refresh_field("feature_finder_publish_docs",'read_only',1);
}
});
if (added) { frappe.msgprint(__(added + " Doc(s) Added"));
cur_frm.set_df_property('feature_finder_publish','read_only',1)
cur_frm.set_df_property("feature_finder_publish_docs",'read_only',1);
cur_frm.refresh_field("feature_finder_publish"); }
else if(added==0) { frappe.msgprint(__("No Doc Added")); }
});
frm.fields_dict.repair_checksheet_publish.grid.grid_buttons.find('.btn-custom').removeClass('btn-default').addClass('btn-primary');
}
if (frm.doc.publish_module == "Automotive System") {
let km_name = frm.doc.variant_mapping + "-" + frm.doc.language + "-KM";
frappe.db.get_value(
@ -1024,30 +1073,30 @@ frappe.ui.form.on("Publish", {
refresh_field("publish_documents");
frappe.msgprint(__(frm.doc.variant_mapping + " Added"));
}
// if(frm.doc.feature_finder_publish_docs && frm.doc.publish_module=='Feature Finder'){
// frm.doc.publish_documents.forEach(publishrow => {
// if (frm.doc.variant_mapping == publishrow.variant_mapping) {
// dulicate = true;
// }
// });
// frappe.msgprint("Variant Mapping already added");
if(frm.doc.feature_finder_publish_docs && frm.doc.publish_module=='Feature Finder'){
frm.doc.publish_documents.forEach(publishrow => {
if (frm.doc.variant_mapping == publishrow.variant_mapping) {
dulicate = true;
}
});
frappe.msgprint("Variant Mapping already added");
// }
// if (dulicate === false && frm.doc.publish_module == 'Feature Finder') {
// let child = cur_frm.add_child("feature_finder_publish_docs");
// child.variant_mapping = frm.doc.variant_mapping;
// child.vehicle = frm.doc.vehicle;
// child.parent1 = row.parent1;
// child.language = row.language;
// child.variant = row.variant;
// child.active_status = row.active_status;
// refresh_field("feature_finder_publish_docs");
// frappe.msgprint(__(frm.doc.variant_mapping + " Added"));
// }
// else{
// frappe.msgprint(__(row.parent1 + 'Alraeady Added'))
}
if (dulicate === false && frm.doc.publish_module == 'Feature Finder') {
let child = cur_frm.add_child("feature_finder_publish_docs");
child.variant_mapping = frm.doc.variant_mapping;
child.vehicle = frm.doc.vehicle;
child.parent1 = row.parent1;
child.language = row.language;
child.variant = row.variant;
child.active_status = row.active_status;
refresh_field("feature_finder_publish_docs");
frappe.msgprint(__(frm.doc.variant_mapping + " Added"));
}
else{
frappe.msgprint(__(row.parent1 + 'Alraeady Added'))
// }
}
},
km_report: function (frm, cdt, cdn) {
if (frm.doc.publish_module == "Automotive System") {

14
smart_service/transactions/doctype/publish/publish.json

@ -28,6 +28,8 @@
"variant",
"variant_mapping_status",
"add_variant_mapping_to_publish",
"section_break_u1haz",
"variant_mapping_html",
"kilometer_mapping_section",
"kilometer_mapping",
"km_report",
@ -387,12 +389,22 @@
"hidden": 1,
"label": "Special Tool Publish Docs",
"options": "Special Tool Publish Docs"
},
{
"fieldname": "section_break_u1haz",
"fieldtype": "Section Break",
"hidden": 1
},
{
"fieldname": "variant_mapping_html",
"fieldtype": "HTML",
"label": "Variant Mapping HTML"
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2023-10-06 11:58:47.207441",
"modified": "2023-11-27 12:34:08.109971",
"modified_by": "Administrator",
"module": "Transactions",
"name": "Publish",

362
smart_service/transactions/doctype/publish/publish.py

@ -630,6 +630,12 @@ def create_publish_folders(folder_url):
if not os.path.isdir(folder_url + "/" + "Internal"):
os.makedirs(folder_url + "/" + "Internal")
if not os.path.isdir(folder_url + "/" + "Internal_Full_Update"):
os.makedirs(folder_url + "/" + "Internal_Full_Update")
if not os.path.isdir(folder_url + "/" + "Global_Full_Update"):
os.makedirs(folder_url + "/" + "Global_Full_Update")
return True
except Exception as e:
@ -774,25 +780,34 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
'data': ''
}
latest_pub_data = vehicle_data.copy()
'''Publish Ready Flags'''
publish_feature_finder = 0
feature_finder_tmp = []
'''Create Folder For Publish'''
create_publish_folders(folder_url)
full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
file_path = folder_url + "/" + publish_type + "/" + \
vehicle.replace(' ', '-') + '-feature_finder' + '.json'
global_file_path = folder_url + "/" + 'Global' + "/" + \
vehicle.replace(' ', '-') + '-feature_finder' + '.json'
full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
# full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
global_full_update_path = folder_url + "/" + "Global_Full_Update"
check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
'''Append Published Data to Json'''
logger.info(
f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}')
f'Feature Data Append Start::{vehicle}-{language}-{publish_type}')
'''update existing global json file'''
if os.path.isfile(global_file_path) and publish_type == 'Internal':
with open(global_file_path) as f:
published_data = json.load(f)
if os.path.isfile(check_glbl_full_updte):
with open(check_glbl_full_updte) as f:
published_data = json.load(f)
for i in parent:
feature_finder = feature_finder_data(vehicle, language,
publish_type, i.parent1, i.variant)
@ -801,7 +816,8 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
feature_finder_tmp.append(
feature_finder['data'][0])
feature_finder_tmp = create_df(feature_finder_tmp)
vehi_data = compare_get_data({'data': published_data['data']}, {
latest_pub_data['data'] = feature_finder_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': feature_finder_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@ -811,7 +827,14 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, feature_finder_tmp)
elif os.path.isfile(file_path) and publish_type == 'Global':
with open(global_file_path) as f:
frappe.log_error('inside')
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
with open(check_glbl_full_updte) as f:
published_data = json.load(f)
for i in parent:
feature_finder = feature_finder_data(vehicle, language,
@ -821,7 +844,8 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
feature_finder_tmp.append(
feature_finder['data'][0])
feature_finder_tmp = create_df(feature_finder_tmp)
vehi_data = compare_get_data({'data': published_data['data']}, {
latest_pub_data['data'] = feature_finder_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': feature_finder_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@ -830,27 +854,78 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
vehi_data = add_new_val(
vehi_data, new_variant_name, feature_finder_tmp)
else:
for i in parent:
feature_finder = feature_finder_data(
vehicle, language, publish_type, i.parent1, i.variant)
if feature_finder['status'] == 1:
publish_feature_finder = 1
if len(feature_finder['data']):
feature_finder_tmp.append(
feature_finder['data'][0])
else:
frappe.throw('failed to publish')
feature_finder_tmp = get_latest_data(
{'data': feature_finder_tmp}, {'data': []})
#Existing Global or Internal Full Update
internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
if os.path.isfile(check_full_update_file):
with open(check_full_update_file) as f:
published_data = json.load(f)
for i in parent:
frappe.log_error("i",str(parent))
feature_finder = feature_finder_data(
vehicle, language, publish_type, i.parent1, i.variant)
if feature_finder['status'] == 1:
publish_feature_finder = 1
if len(feature_finder['data']):
feature_finder_tmp.append(
feature_finder['data'][0])
else:
frappe.throw('failed to publish')
feature_finder_tmp = create_df(feature_finder_tmp)
latest_pub_data['data'] = feature_finder_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': feature_finder_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
new_variant_name = [
x for x in new_variant if x not in find_distinct]
vehi_data = add_new_val(
vehi_data, new_variant_name, feature_finder_tmp)
vehi_data = create_df(feature_finder_tmp)
vehicle_data['data'] = vehi_data
#First Time Global Or Internal
else:
for i in parent:
frappe.log_error("619",str(parent))
feature_finder = feature_finder_data(
vehicle, language, publish_type, i.parent1, i.variant)
if feature_finder['status'] == 1:
publish_feature_finder = 1
if len(feature_finder['data'])>0:
feature_finder_tmp.append(
feature_finder['data'][0])
else:
frappe.throw('failed to publish')
feature_finder_tmp = get_latest_data(
{'data': feature_finder_tmp}, {'data': []})
frappe.log_error(str(feature_finder_tmp))
vehi_data = create_df(feature_finder_tmp)
latest_pub_data['data'] = vehi_data
if publish_type == "Global":
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
""" Save publish file """
vehicle_data['data'] = vehi_data
with open(file_path, 'w') as outfile:
outfile.write(json.dumps(latest_pub_data, indent=4, default=str))
with open(full_update_file, 'w') as outfile:
outfile.write(json.dumps(vehicle_data, indent=4, default=str))
logger.info(
f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}')
@ -862,57 +937,68 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
frappe.throw('Failed To Publish')
def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None,
language=None, publish_type=None,
release_description=None, parent=None):
def qwik_service_publish(vehicle=None, vehicle_id=None,
language=None, publish_type=None,
release_description=None, parent=None,
variant=None):
try:
logger_file = f'{variant} - {language} - {publish_type} - Qwik_service'
logger = frappe.logger(logger_file, allow_site=True, file_count=100)
logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}'
logger = frappe.logger(logger_file,
allow_site=True, file_count=100)
logger.info(
f'Qwik_service-{variant}-{language}-{publish_type}-{parent}')
f"start of fetching qwik_service data - {vehicle} - {language} - {module_name} - {variant}")
folder_url = base_url+"/"+vehicle.replace(' ', '-') + "/"+language
logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}')
'''Publish Ready Flags'''
publish_qwik_service = 0
qwik_service_tmp = []
qwik_service = ''
'''Create Folder For Publish'''
create_publish_folders(folder_url)
file_path = folder_url + "/" + publish_type + "/" + \
vehicle.replace(' ', '-') + '-qwik_service' + '.json'
global_file_path = folder_url + "/" + "Global" + "/" + \
vehicle.replace(' ', '-') + '-qwik_service' + '.json'
exising_internal_path = folder_url + "/" + "Internal" + "/" + \
vehicle.replace(' ', '-') + '-qwik_service' + '.json'
'''Append Published Data to Json'''
logger.info(
f'Qwik_service Data Append Start::{vehicle}-{language}-{publish_type}')
'''Final Json'''
vehicle_data = {
'vehicle': vehicle,
'vehicle_myid': vehicle_id,
'variant': variant,
'publish_type': publish_type,
'publish_description': release_description,
'publish_language': language,
'data': ''
}
if os.path.isfile(global_file_path) and publish_type == 'Internal':
with open(global_file_path) as f:
published_data = json.load(f)
latest_pub_data = vehicle_data.copy()
'''Publish Ready Flags'''
publish_qwik_service = 0
qwik_service_tmp = []
'''Create Folder For Publish'''
create_publish_folders(folder_url)
full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
file_path = folder_url + "/" + publish_type + "/" + \
vehicle.replace(' ', '-') + '- qwik_service' + '.json'
global_file_path = folder_url + "/" + 'Global' + "/" + \
vehicle.replace(' ', '-') + '- qwik_service' + '.json'
full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json'
# full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
global_full_update_path = folder_url + "/" + "Global_Full_Update"
check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json'
'''Append Published Data to Json'''
logger.info(
f' qwik_service Data Append Start::{vehicle}-{language}-{publish_type}')
'''update existing global json file'''
if os.path.isfile(global_file_path) and publish_type == 'Internal':
if os.path.isfile(check_glbl_full_updte):
with open(check_glbl_full_updte) as f:
published_data = json.load(f)
for i in parent:
qwik_service = qwik_service_data(
language, publish_type, i.variant, i.parent1, vehicle)
if qwik_service['status'] == 1 and len(qwik_service['data']) > 0:
qwik_service = qwik_service_data(vehicle, language,
publish_type, i.parent1, i.variant)
if qwik_service['status'] == 1:
publish_qwik_service = 1
qwik_service_tmp.append(
qwik_service['data'][0])
qwik_service_tmp = create_df(qwik_service_tmp)
vehi_data = compare_get_data({'data': published_data['data']}, {
latest_pub_data['data'] = qwik_service_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': qwik_service_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@ -921,20 +1007,26 @@ def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None,
vehi_data = add_new_val(
vehi_data, new_variant_name, qwik_service_tmp)
elif os.path.isfile(global_file_path) and publish_type == 'Global':
with open(global_file_path) as f:
elif os.path.isfile(file_path) and publish_type == 'Global':
frappe.log_error('inside')
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
with open(check_glbl_full_updte) as f:
published_data = json.load(f)
for i in parent:
qwik_service = qwik_service_data(
language, publish_type, i.variant, i.parent1, vehicle)
if qwik_service['status'] == 1 and len(qwik_service['data']) > 0:
qwik_service = qwik_service_data(vehicle, language,
publish_type, i.parent1, i.variant)
if qwik_service['status'] == 1:
publish_qwik_service = 1
qwik_service_tmp.append(
qwik_service['data'][0])
qwik_service_tmp = create_df(qwik_service_tmp)
vehi_data = compare_get_data({'data': published_data['data']}, {
latest_pub_data['data'] = qwik_service_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': qwik_service_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@ -943,32 +1035,93 @@ def qwik_service_publish(vehicle=None, vehicle_id=None, variant=None,
vehi_data = add_new_val(
vehi_data, new_variant_name, qwik_service_tmp)
else:
for i in parent:
qwik_service = qwik_service_data(
language, publish_type, i.variant, i.parent1, vehicle)
if qwik_service['status'] == 1 and len(qwik_service['data']) > 0:
publish_qwik_service = 1
qwik_service_tmp.append(
qwik_service['data'][0])
else:
frappe.throw('failed to publish')
qwik_service_tmp = get_latest_data(
{'data': qwik_service_tmp}, {'data': []})
vehi_data = create_df(qwik_service_tmp)
#Existing Global or Internal Full Update
internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
if os.path.isfile(check_full_update_file):
with open(check_full_update_file) as f:
published_data = json.load(f)
for i in parent:
qwik_service = qwik_service_data(
vehicle, language, publish_type, i.parent1, i.variant)
if qwik_service['status'] == 1:
publish_qwik_service = 1
if len(qwik_service['data']):
qwik_service_tmp.append(
qwik_service['data'][0])
else:
frappe.throw('failed to publish')
qwik_service_tmp = create_df(qwik_service_tmp)
latest_pub_data['data'] = qwik_service_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
'data': qwik_service_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
new_variant_name = [
x for x in new_variant if x not in find_distinct]
vehi_data = add_new_val(
vehi_data, new_variant_name, qwik_service_tmp)
if vehi_data:
find_distinct = set(exisitng_var)
new_variant_name = [
x for x in new_variant if x not in find_distinct]
vehi_data = add_new_val(
vehi_data, new_variant_name, qwik_service_tmp)
vehicle_data['data'] = vehi_data
#First Time Global Or Internal
else:
for i in parent:
frappe.log_error("211",str(i))
qwik_service = qwik_service_data(
vehicle, language, publish_type, i.parent1, i.variant)
if qwik_service['status'] == 1:
publish_qwik_service = 1
if len(qwik_service['data'])>0:
qwik_service_tmp.append(
qwik_service['data'][0])
else:
frappe.throw('failed to publish')
qwik_service_tmp = get_latest_data(
{'data': qwik_service_tmp}, {'data': []})
frappe.log_error(str(qwik_service_tmp))
vehi_data = create_df(qwik_service_tmp)
latest_pub_data['data'] = vehi_data
if publish_type == "Global":
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
""" Save publish file """
vehicle_data['data'] = vehi_data
with open(file_path, 'w') as outfile:
outfile.write(json.dumps(latest_pub_data, indent=4, default=str))
with open(full_update_file, 'w') as outfile:
outfile.write(json.dumps(vehicle_data, indent=4, default=str))
except Exception as e:
logger.error(
f'{variant} - {language} - {publish_type} - Qwik_service'+str(e))
logger.info(
f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}')
return 1, file_path
except Exception as e:
logger.info(
f'{vehicle} - {language} - {publish_type} - {module_name} error in json creation' + str(e))
frappe.throw('Failed To Publish')
def special_tool_publish(vehicle, vehicle_id,
publish_type, release_description,
parent=None):
@ -987,9 +1140,9 @@ def special_tool_publish(vehicle, vehicle_id,
'''Create Folder For Publish'''
create_publish_folders(folder_url)
file_path = folder_url + "/" + publish_type + "/" + \
file_path = folder_url + "/" + publish_type+"_Full_Update" + "/"+ \
vehicle.replace(' ', '-') + '-special_tool' + '.json'
existing_global = folder_url + "/" + "Global" + "/" + \
existing_global = folder_url + "/" + "Global_Full_Update" + "/" + \
vehicle.replace(' ', '-') + '-special_tool' + '.json'
'''Append Published Data to Json'''
@ -1032,17 +1185,48 @@ def special_tool_publish(vehicle, vehicle_id,
'data': special_tool_tmp})
else:
for i in parent:
special_tool = special_tool_data(
vehicle, publish_type, i.parent1)
internal_full_update_check = folder_url + "/" +"Internal_Full_Update"
check_inter_full_update_file = internal_full_update_check +"/"+ \
vehicle.replace(' ', '-') + '-special_tool' + '.json'
if special_tool['status'] == 1:
publish_special_tool = 1
if len(special_tool['data']) > 0:
frappe.log_error(str(check_inter_full_update_file))
# check if Internal Full Update Already Exist
if os.path.isfile(check_inter_full_update_file):
frappe.log_error('inside_full_update')
with open(check_inter_full_update_file) as f:
published_data = json.load(f)
for i in parent:
special_tool = special_tool_data(
vehicle, publish_type, i.parent1)
if special_tool['status'] == 1 and len(special_tool['data']) > 0:
publish_special_tool = 1
special_tool_tmp.append(
special_tool['data'][0])
special_tool_tmp = get_latest_data(
{'data': special_tool_tmp}, {'data': []})
special_tool_tmp = get_latest_data({'data': published_data['data']}, {
'data': special_tool_tmp})
else:
for i in parent:
special_tool = special_tool_data(
vehicle, publish_type, i.parent1)
if special_tool['status'] == 1:
publish_special_tool = 1
if len(special_tool['data']) > 0:
special_tool_tmp.append(
special_tool['data'][0])
special_tool_tmp = get_latest_data(
{'data': special_tool_tmp}, {'data': []})
if publish_type == "Global":
#Remove Internal Full Update File On Global Publish
internal_full_update_check = folder_url + "/" +"Internal_Full_Update"
check_inter_full_update_file = internal_full_update_check +"/"+ \
vehicle.replace(' ', '-') + '-special_tool' + '.json'
if os.path.isfile(check_inter_full_update_file):
os.remove(check_inter_full_update_file)
if publish_special_tool == 1:
""" Save publish file """

123
smart_service/transactions/doctype/variant_mapping/variant_mapping.py

@ -1,60 +1,109 @@
# Copyright (c) 2021, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
import sys
import frappe
from frappe.model.document import Document
current_db_name = frappe.conf.get("db_name")
import sys
sys.tracebacklimit=0
sys.tracebacklimit = 0
class VariantMapping(Document):
def validate(self):
assets = frappe.db.sql("""select category,idx,active_status,attach_file from {0}.`tabVariant Mapping_Assets` where parent = '{1}'""".format(current_db_name,self.name),as_dict=True)
for x in assets:
for y in self.get("assets"):
if x.idx == y.idx and y.category == x.category:
if y.active_status != x.active_status:
y.published = "0"
frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(current_db_name,self.name,str(y.idx)))
frappe.db.commit()
if y.attach_file != x.attach_file:
y.published = "0"
frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(current_db_name,self.name,str(y.idx)))
frappe.db.commit()
if self.is_new():
data = frappe.db.sql("""select name from {0}.`tabVariant Mapping`
def validate(self):
assets = frappe.db.sql("""select category,idx,active_status,attach_file from {0}.`tabVariant Mapping_Assets` where parent = '{1}'""".format(
current_db_name, self.name), as_dict=True)
for x in assets:
for y in self.get("assets"):
if x.idx == y.idx and y.category == x.category:
if y.active_status != x.active_status:
y.published = "0"
frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(
current_db_name, self.name, str(y.idx)))
frappe.db.commit()
if y.attach_file != x.attach_file:
y.published = "0"
frappe.db.sql("""update {0}.`tabVariant Mapping_Assets` set published = '0' where parent = '{1}' and idx = '{2}'""".format(
current_db_name, self.name, str(y.idx)))
frappe.db.commit()
if self.is_new():
data = frappe.db.sql("""select name from {0}.`tabVariant Mapping`
where variant = '{1}' and fuel = '{2}' and
transmission = '{3}' and drive = '{4}'""".format(current_db_name,self.variant,self.fuel,self.transmission,self.drive),as_dict=True)
if data:
if data != self.name:
frappe.throw('Variant Mapping already created as {0} for {1}'.format(data[0]['name'],self.variant))
if self.assets:
for x in self.get("assets"):
if not x.language:
frappe.throw("Please select the language in row no " + str(x.idx))
if not x.attach_file:
frappe.throw("Please select the attach file in row no " + str(x.idx))
transmission = '{3}' and drive = '{4}'""".format(current_db_name, self.variant, self.fuel, self.transmission, self.drive), as_dict=True)
if data:
if data != self.name:
frappe.throw('Variant Mapping already created as {0} for {1}'.format(
data[0]['name'], self.variant))
create_module_publish(self.vehicle, self.variant)
if self.assets:
for x in self.get("assets"):
if not x.language:
frappe.throw(
"Please select the language in row no " + str(x.idx))
if not x.attach_file:
frappe.throw(
"Please select the attach file in row no " + str(x.idx))
@frappe.whitelist()
def variant(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql(""" select name,variant,vehicle,vehicle_segment,active_status
return frappe.db.sql(""" select name,variant,vehicle,vehicle_segment,active_status
FROM {0}.tabVariant where
(active_status like "%%%(txt)s%%" or vehicle_segment like "%%%(txt)s%%" or name like "%%%(txt)s%%" or variant like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len})
(active_status like "%%%(txt)s%%" or vehicle_segment like "%%%(txt)s%%" or name like "%%%(txt)s%%" or variant like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len})
@frappe.whitelist()
def fuel(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql(""" select name FROM {0}.tabFuel where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len})
return frappe.db.sql(""" select name FROM {0}.tabFuel where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len})
@frappe.whitelist()
def transmission(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql(""" select name FROM {0}.tabTransmission where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len})
return frappe.db.sql(""" select name FROM {0}.tabTransmission where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len})
@frappe.whitelist()
def drive(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql(""" select name FROM {0}.tabDrive where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name)%{'txt': txt, 'start': start, 'page_len': page_len})
return frappe.db.sql(""" select name FROM {0}.tabDrive where
(name like "%%%(txt)s%%") order by modified desc limit %(start)s, %(page_len)s """.format(current_db_name) % {'txt': txt, 'start': start, 'page_len': page_len})
@frappe.whitelist(allow_guest=1)
def create_module_publish(vehicle, variant):
language_list = frappe.db.sql(
''' select lang_code from `tabCustom Languages`; ''', as_dict=1)
if language_list:
for l in language_list:
doc = frappe.get_doc({
'doctype': 'Module Publish Mapping',
"vehicle": vehicle,
"variant": variant,
"language": l['lang_code'],
"publish_type": "Internal"
})
doc.save()
doc1 = frappe.get_doc({
'doctype': 'Module Publish Mapping',
"vehicle": vehicle,
"variant": variant,
"language": l['lang_code'],
"publish_type": "Global"
})
doc1.save()
frappe.log_error('Doc Global', str(doc1))
def update_repair_service(vehicle):
repair_data = frappe.db.sql(
f'''select * from `tabModule Publish Mapping` where vehicle = '{vehicle}' and repairservice_check_sheet= 1;''')

Loading…
Cancel
Save