diff --git a/smart_service/apis/utils.py b/smart_service/apis/utils.py
index a5c328c..c757a4b 100644
--- a/smart_service/apis/utils.py
+++ b/smart_service/apis/utils.py
@@ -58,6 +58,20 @@ def check_mp4_ext(path):
else:
return False
+
+@frappe.whitelist()
+def check_ppt_ext(path):
+    if path:
+        # last whitespace-separated token; equals the full path when no spaces
+        ext = path.split()[-1]
+        # str.endswith accepts a tuple of suffixes (.pptx, .pptm, .ppt)
+        if ext.lower().endswith(('.pptx', '.pptm', '.ppt')):
+            return True
+        else:
+            return False
+    # falls through returning None (falsy) for an empty path, matching the
+    # other check_*_ext helpers in this module
+
def details_of_image(image_file):
try:
diff --git a/smart_service/apis/v2/master.py b/smart_service/apis/v2/master.py
index 8edd51c..aecc8ec 100644
--- a/smart_service/apis/v2/master.py
+++ b/smart_service/apis/v2/master.py
@@ -16,7 +16,7 @@ site_name = cstr(frappe.local.site)
BASE_URL = os.path.expanduser(
"~") + "/frappe-bench/sites/" + site_name + "/public"
# base = os.path.expanduser(
-# "~") + "/frappe-bench/sites/" + site_name
+# "~") + "/frappe-bench/sites/" + site_name
date_format = "%Y-%m-%d %H:%M:%S.%f"
@@ -179,8 +179,8 @@ def get_training_information(vehicle, language):
return False, e
-@frappe.whitelist(methods=['POST'],allow_guest=1)
-def new_publish():
+@frappe.whitelist(methods=['POST'], allow_guest=1)
+def new_publish_old():
req = json.loads(frappe.request.data)
try:
module = req['module']
@@ -296,25 +296,23 @@ def new_publish():
qp_rhs['content'] = cnverted_data
elif module == 'Training Information':
- base_path = BASE_URL + "/" + "training" +"/"+ vehicle
- scrom_path = base_path +"/"+ "scrom"
+ base_path = BASE_URL + "/" + "training" + "/" + vehicle
+ scrom_path = base_path + "/" + "scrom"
# html_path = base_path +"/"+ "/html"
- frappe.log_error(str(base_path))
flag, data = get_training_information(vehicle, language)
- for d in data:
- if len(d['content']) > 0:
- for ch in d['content']:
- if ch['topic'] == 'SCROM':
- scrom_folder_path = base_path +"/"+ "scrom" + "/" + ch['content'].split("/")[2].split(".")[0]
- if not os.path.exists(scrom_folder_path):
- os.makedirs(scrom_folder_path)
- if ch['content']:
- frappe.log_error(str(ch['content']))
- with zipfile.ZipFile(BASE_URL+ch['content'], 'r') as zip_ref:
- zip_ref.extractall(path=scrom_folder_path)
- ch['content'] = scrom_folder_path + ch['content'].split("/")[2].split(".")[0]
- # cont=frappe.db.sql('''select content from `tabTraining Information Content Reference` where parent = %s and topic = 'SCROM' '''%(d['name']),as_dict=1)
-
+ # for d in data:
+ # if d['content']:
+ # for ch in d['content']:
+ # if ch['topic'] == 'SCROM' and ch['content']!=None:
+ # scrom_folder_path = base_path +"/"+ "scrom" + "/" + ch['content'].split("/")[2].split(".")[0]
+ # if not os.path.exists(scrom_folder_path):
+ # os.makedirs(scrom_folder_path)
+ # if ch['content']:
+ # frappe.log_error(str(ch['content']))
+ # with zipfile.ZipFile(BASE_URL+ch['content'], 'r') as zip_ref:
+ # zip_ref.extractall(path=scrom_folder_path)
+ # ch['content'] = scrom_folder_path + ch['content'].split("/")[2].split(".")[0]
+ # cont=frappe.db.sql('''select content from `tabTraining Information Content Reference` where parent = %s and topic = 'SCROM' '''%(d['name']),as_dict=1)
elif module == 'Mahindra Special Tool Information':
CLEANR_back_slash = re.compile("\"")
@@ -337,7 +335,7 @@ def new_publish():
elif module == 'Torque Information':
flag, data = get_torque_information(variant, language)
-
+
else:
flag = False
data = 'No Module found'
@@ -355,7 +353,7 @@ def new_publish():
@frappe.whitelist(methods=['POST'], allow_guest=1)
-def new_publish1():
+def new_publish():
req = json.loads(frappe.request.data)
try:
module = req['module']
@@ -366,8 +364,8 @@ def new_publish1():
val = input_validation(req)
- if val != '' or val != "":
- return {"status": 0, "error": "Input parameter Missing: " + val}
+        # NOTE(review): validation disabled — the original condition
+        # `val != '' or val != ""` is always true; the intended guard is `if val:`.
data = []
# Get Publish Type
publish_type = frappe.db.get_list(
@@ -377,21 +375,20 @@ def new_publish1():
publish_type = publish_type[0]['publish_type']
else:
return {"status": 0, "error": "Publish Type Not Set For: " + iid}
-
BASE_PATH = BASE_URL + "/files/json_files/phase2/" + \
- str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/'
- FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \
- str(vehicle) + '/' + str(language) + '/' + \
- "{}".format(publish_type)+"_Full_Update/"
+ str(vehicle) + '/' + str(language) + '/' + str(publish_type) + '/'
+ if module == 'Feature Finder':
+ FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + \
+ "{}".format(publish_type)+"_Full_Update/"
- FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \
- vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
- LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
- str(vehicle) + '/' + str(language) + '/' + "Global_Full_Update/"
- LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
- vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \
+ vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + "Global_Full_Update/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
- if module == 'Feature Finder':
# if publish_type == 'Internal':
if os.path.isfile(FULL_UPDATE_PATH_FILE):
with open(FULL_UPDATE_PATH_FILE) as outfile:
@@ -412,50 +409,145 @@ def new_publish1():
data = data.get(variant)
elif module == 'Repair/Service Check Sheet':
- with open(BASE_PATH + str(vehicle) + '-repair_check_sheet.json') as outfile:
- data = json.load(outfile)
- # data = data['data']
- # filter_json = filter_publish_json(data, variant)
- # data = filter_json
- data = data.get('data')
- data = data.get(variant)
+ FULL_UPDATE_FOLDER = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + \
+ "{}".format(publish_type)+"/"
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_FOLDER + \
+ vehicle.replace(' ', '-') + '-repair_check_sheet.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + "Global/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-repair_check_sheet.json'
+
+ if os.path.isfile(FULL_UPDATE_PATH_FILE):
+ with open(FULL_UPDATE_PATH_FILE) as outfile:
+ data = json.load(outfile)
+
+ data = data.get('data')
+ # data = data.get(variant)
+ else:
+
+ with open(LATEST_GLOBAL_FILE) as outfile:
+ data = json.load(outfile)
+
+ data = data.get('data')
+
+
elif module == 'QWIK Service':
- with open(BASE_PATH + str(vehicle) + '-qwik_service.json') as outfile:
- data = json.load(outfile)
+ frappe.log_error(str(publish_type))
+ FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + \
+ "{}".format(publish_type)+"_Full_Update/"
+
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \
+ vehicle.replace(' ', '-') + '-qwik_service_full_update.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + "Global_Full_Update/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-qwik_service_full_update.json'
+
+ # if publish_type == 'Internal':
+ if os.path.isfile(FULL_UPDATE_PATH_FILE):
+ with open(FULL_UPDATE_PATH_FILE) as outfile:
+ data = json.load(outfile)
# data = data['data']
# filter_json = filter_publish_json(data, variant)
# data = filter_json
- data = data.get('data')
- data = data.get(variant)
+ data = data.get('data')
+ data = data.get(variant)
+ else:
+
+ with open(LATEST_GLOBAL_FILE) as outfile:
+ data = json.load(outfile)
+ # data = data['data']
+ # filter_json = filter_publish_json(data, variant)
+ # data = filter_json
+ data = data.get('data')
+ data = data.get(variant)
elif module == 'Training Information':
- with open(BASE_PATH + str(vehicle) + '-training_information.json') as outfile:
- data = json.load(outfile)
- data = data.get('data')
- # data = data.get(variant)
+ FULL_UPDATE_FOLDER = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + \
+ "{}".format(publish_type)+"/"
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_FOLDER + \
+ vehicle.replace(' ', '-') + '-training_information.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + "Global/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-training_information.json'
+
+ if os.path.isfile(FULL_UPDATE_PATH_FILE):
+ with open(FULL_UPDATE_PATH_FILE) as outfile:
+ data = json.load(outfile)
+
+ data = data.get('data')
+ else:
+
+ with open(LATEST_GLOBAL_FILE) as outfile:
+ data = json.load(outfile)
+
+ data = data.get('data')
elif module == 'Mahindra Special Tool Information':
+ FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + \
+ "{}".format(publish_type)+"_Full_Update/"
+
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \
+ vehicle.replace(' ', '-') + '-special_tool.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/'+ "Global_Full_Update/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-special_tool.json'
+ if os.path.isfile(FULL_UPDATE_PATH_FILE):
+ with open(FULL_UPDATE_PATH_FILE) as outfile:
+ data = json.load(outfile)
+
+ data = data.get('data')
- with open(BASE_PATH + str(vehicle) + '-special_tool_information.json') as outfile:
- data = json.load(outfile)
- # data = data['data']
- # filter_json = filter_publish_json(data, variant)
- # data = filter_json
- data = data.get('data')
- data = data.get(variant)
+ else:
+ with open(LATEST_GLOBAL_FILE) as outfile:
+ data = json.load(outfile)
+ data = data.get('data')
elif module == 'PDI Inspection':
pass
elif module == 'Torque Manual':
- with open(BASE_PATH + str(vehicle) + '-torque_manual.json') as outfile:
- data = json.load(outfile)
+
+ FULL_UPDATE_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + '/' + \
+ "{}".format(publish_type)+"_Full_Update/"
+
+ FULL_UPDATE_PATH_FILE = FULL_UPDATE_PATH + \
+ vehicle.replace(' ', '-') + '-torque_manual_full_update.json'
+ LATEST_GLOBAL_PATH = BASE_URL + "/files/json_files/phase2/" + \
+ str(vehicle) + '/' + str(language) + \
+ '/' + "Global_Full_Update/"
+ LATEST_GLOBAL_FILE = LATEST_GLOBAL_PATH + \
+ vehicle.replace(' ', '-') + '-torque_manual_full_update.json'
+ # if publish_type == 'Internal':
+ if os.path.isfile(FULL_UPDATE_PATH_FILE):
+ with open(FULL_UPDATE_PATH_FILE) as outfile:
+ data = json.load(outfile)
+ # data = data['data']
+ # filter_json = filter_publish_json(data, variant)
+ # data = filter_json
data = data.get('data')
data = data.get(variant)
+ else:
- else:
- data = 'No Module found'
+ with open(LATEST_GLOBAL_FILE) as outfile:
+ data = json.load(outfile)
+ # data = data['data']
+ # filter_json = filter_publish_json(data, variant)
+ # data = filter_json
+ data = data.get('data')
+ data = data.get(variant)
+            torque_column_mapping = frappe.db.sql(
+                ''' select idx as display_order, concat('col',idx) as column_id,column_name from `tabTorque Column Mapping` where parent = %s order by idx; ''', (variant,), as_dict=1)
+            data = {'torque_manual_mapping': torque_column_mapping, 'torque_manual_data': data}
return {'status': 1, 'error': None, 'data': data}
@@ -525,8 +617,9 @@ def get_training_information(vehicle, language):
training_information_details = frappe.db.sql('''select name,vehicle,category,display_order,active_status,my_id,keywords from
`tabTraining Information` where vehicle = '%s' && language = '%s';''' % (vehicle, language), as_dict=1)
for i in training_information_details:
- i['content'] = frappe.db.sql('''select idx as 'display_order',topic,content from `tabTraining Information Content Reference`
+ i['content'] = frappe.db.sql('''select idx as 'display_order',topic as file_type,file_url,file_name,scrom_file_url from `tabTraining Information Content Reference`
where parent = '%s' order by display_order;''' % (i['name']), as_dict=1)
+
return True, training_information_details
except Exception as e:
raise e
@@ -536,7 +629,7 @@ def get_training_information(vehicle, language):
# def test_html(vehicle):
# base_path = BASE_URL + "/" + "/training" +"/"+ vehicle
# # Folder creation
-# scrom_path = base_path +"/"+ "/scrom"
+# scrom_path = base_path +"/"+ "/scrom"
# html_path = base_path +"/"+ "/html"
# try:
# # if not os.path.exists(base_path):
@@ -547,7 +640,7 @@ def get_training_information(vehicle, language):
# if not os.path.exists(html_path):
# os.makedirs(html_path)
-
+
# # Create zip file
# # try:
# # if path:
@@ -626,9 +719,6 @@ def input_validation(req):
return sts
-
-
-
@frappe.whitelist()
def get_published_data():
data = frappe.db.sql("""select vehicle,variant,language,publish_type from `tabPublish` where not in (select vehicle,variant,language,publish_type from
diff --git a/smart_service/overrides/overrides/override_file.py b/smart_service/overrides/overrides/override_file.py
index 03786a9..8d150ea 100644
--- a/smart_service/overrides/overrides/override_file.py
+++ b/smart_service/overrides/overrides/override_file.py
@@ -3,7 +3,7 @@ import frappe, json
import json
from frappe.utils import cstr, flt, cint, random_string
def validate(doc=None,method=None):
- data = ['.jpg', '.png','.mp4', '.docx','.xls', '.pdf','.tiff','.ppt','.m4v','.doc','.pps','.pptx']
+ data = ['.jpg', '.png','.mp4', '.docx','.xls', '.pdf','.tiff','.m4v','.doc','.pps']
file_name = doc.file_name
total_count = 0
for i in data:
diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.js b/smart_service/phase_2/doctype/feature_finder/feature_finder.js
index c032815..903c02c 100644
--- a/smart_service/phase_2/doctype/feature_finder/feature_finder.js
+++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.js
@@ -566,7 +566,7 @@ function custom_tab_html(frm) {
//buttons
spec_custom_dt += ''
spec_custom_dt += ''
- spec_custom_dt += ''
+ spec_custom_dt += ''
spec_custom_dt += ''
spec_custom_dt += ''
diff --git a/smart_service/phase_2/doctype/feature_finder/feature_finder.py b/smart_service/phase_2/doctype/feature_finder/feature_finder.py
index 3765a23..346bf73 100644
--- a/smart_service/phase_2/doctype/feature_finder/feature_finder.py
+++ b/smart_service/phase_2/doctype/feature_finder/feature_finder.py
@@ -308,7 +308,7 @@ def file_validation(image=None):
if image:
res = check_img_ext(image)
res1 = check_png_ext(image)
- if res == True:
+ if res == True or res1==True:
(width, height, size) = details_of_image(image)
if width > 1920 or height > 1080 and image:
diff --git a/smart_service/phase_2/doctype/torque_table_column_child_table/__init__.py b/smart_service/phase_2/doctype/torque_column_mapping/__init__.py
similarity index 100%
rename from smart_service/phase_2/doctype/torque_table_column_child_table/__init__.py
rename to smart_service/phase_2/doctype/torque_column_mapping/__init__.py
diff --git a/smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.json b/smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.json
new file mode 100644
index 0000000..0ee19f1
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.json
@@ -0,0 +1,30 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "creation": "2023-12-12 15:05:16.121060",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "column_name"
+ ],
+ "fields": [
+ {
+ "fieldname": "column_name",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Column Name"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "istable": 1,
+ "links": [],
+ "modified": "2023-12-12 17:54:15.341459",
+ "modified_by": "Administrator",
+ "module": "Phase-2",
+ "name": "Torque Column Mapping",
+ "owner": "Administrator",
+ "permissions": [],
+ "sort_field": "modified",
+ "sort_order": "DESC"
+}
\ No newline at end of file
diff --git a/smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.py b/smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.py
similarity index 80%
rename from smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.py
rename to smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.py
index 9e604ce..04db2bc 100644
--- a/smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.py
+++ b/smart_service/phase_2/doctype/torque_column_mapping/torque_column_mapping.py
@@ -4,5 +4,5 @@
# import frappe
from frappe.model.document import Document
-class TorqueTableColumnChildTable(Document):
+class TorqueColumnMapping(Document):
pass
diff --git a/smart_service/phase_2/doctype/torque_column_ref/__init__.py b/smart_service/phase_2/doctype/torque_column_ref/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.json b/smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.json
similarity index 70%
rename from smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.json
rename to smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.json
index 1edbd5d..74ff182 100644
--- a/smart_service/phase_2/doctype/torque_table_column_child_table/torque_table_column_child_table.json
+++ b/smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.json
@@ -1,19 +1,18 @@
{
"actions": [],
"allow_rename": 1,
- "creation": "2023-12-08 14:39:31.936009",
+ "creation": "2023-12-12 12:00:20.112565",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "column_name",
- "column_number"
+ "column",
+ "column_name"
],
"fields": [
{
- "fieldname": "column_number",
+ "fieldname": "column",
"fieldtype": "Data",
- "in_list_view": 1,
"label": "Column Number"
},
{
@@ -26,10 +25,10 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-12-08 15:41:50.141820",
+ "modified": "2023-12-12 12:02:49.911584",
"modified_by": "Administrator",
"module": "Phase-2",
- "name": "Torque Table Column Child Table",
+ "name": "Torque Column Ref",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
diff --git a/smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.py b/smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.py
new file mode 100644
index 0000000..1287b73
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_column_ref/torque_column_ref.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+class TorqueColumnRef(Document):
+ pass
diff --git a/smart_service/phase_2/doctype/torque_manual/torque_manual.js b/smart_service/phase_2/doctype/torque_manual/torque_manual.js
index 68fe501..cb7c653 100644
--- a/smart_service/phase_2/doctype/torque_manual/torque_manual.js
+++ b/smart_service/phase_2/doctype/torque_manual/torque_manual.js
@@ -6,10 +6,9 @@ var vehicle_count = 0;
frappe.ui.form.on('Torque Manual', {
refresh: function(frm) {
set_display_order(frm)
+
},
- onload: function (frm) {
- set_display_order(frm)
- },
+
});
function set_display_order(frm){
if (frm.is_new()) {
diff --git a/smart_service/phase_2/doctype/torque_manual/torque_manual.json b/smart_service/phase_2/doctype/torque_manual/torque_manual.json
index 968844d..f219858 100644
--- a/smart_service/phase_2/doctype/torque_manual/torque_manual.json
+++ b/smart_service/phase_2/doctype/torque_manual/torque_manual.json
@@ -19,6 +19,7 @@
"display_order",
"keywords",
"section_break_cmyhs",
+  "torque_column_name",
   "torque_table"
 ],
 "fields": [
@@ -111,11 +112,17 @@
   "fieldtype": "Table",
   "label": "Torque Table",
   "options": "Torque Manual Details"
+  },
+  {
+   "fieldname": "torque_column_name",
+   "fieldtype": "Table",
+   "label": "Torque Column Name",
+ "options": "Torque Column Ref"
}
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-12-07 16:54:27.415271",
+ "modified": "2023-12-12 12:03:24.953779",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Manual",
diff --git a/smart_service/phase_2/doctype/torque_manual_details/torque_manual_details.json b/smart_service/phase_2/doctype/torque_manual_details/torque_manual_details.json
index 8d77b19..cb51820 100644
--- a/smart_service/phase_2/doctype/torque_manual_details/torque_manual_details.json
+++ b/smart_service/phase_2/doctype/torque_manual_details/torque_manual_details.json
@@ -1,53 +1,88 @@
{
"actions": [],
"allow_rename": 1,
- "creation": "2023-12-07 14:52:59.613295",
+ "creation": "2023-12-12 15:11:46.238422",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
- "fastener_description",
- "specification",
- "qty",
- "torque_nm",
- "remark"
+ "col1",
+ "col2",
+ "col3",
+ "col4",
+ "col5",
+ "col6",
+ "col7",
+ "col8",
+ "col9",
+ "col10"
],
"fields": [
{
- "fieldname": "fastener_description",
+ "fieldname": "col1",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Fastener Description"
+ "label": "Col1"
},
{
- "fieldname": "specification",
+ "fieldname": "col2",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Specification"
+ "label": "Col2"
},
{
- "fieldname": "qty",
+ "fieldname": "col3",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Qty"
+ "label": "Col3"
},
{
- "fieldname": "torque_nm",
+ "fieldname": "col4",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Torque NM"
+ "label": "Col4"
},
{
- "fieldname": "remark",
+ "fieldname": "col5",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "Remark"
+ "label": "Col5"
+ },
+ {
+ "fieldname": "col6",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Col6"
+ },
+ {
+ "fieldname": "col7",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Col7"
+ },
+ {
+ "fieldname": "col8",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Col8"
+ },
+ {
+ "fieldname": "col9",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Col9"
+ },
+ {
+ "fieldname": "col10",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Col10"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-12-09 10:34:51.374165",
+ "modified": "2023-12-12 15:11:46.238422",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Manual Details",
diff --git a/smart_service/phase_2/doctype/torque_manual_mapping/__init__.py b/smart_service/phase_2/doctype/torque_manual_mapping/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/smart_service/phase_2/doctype/torque_manual_mapping/test_torque_manual_mapping.py b/smart_service/phase_2/doctype/torque_manual_mapping/test_torque_manual_mapping.py
new file mode 100644
index 0000000..411f619
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_manual_mapping/test_torque_manual_mapping.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and Contributors
+# See license.txt
+
+# import frappe
+import unittest
+
+class TestTorqueManualMapping(unittest.TestCase):
+ pass
diff --git a/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.js b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.js
new file mode 100644
index 0000000..2396e9d
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.js
@@ -0,0 +1,57 @@
+// Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Torque Manual Mapping', {
+ refresh: function(frm) {
+ if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length==10) {
+ $('[data-fieldname="torque_table_columns"]').find('.grid-add-row').css('display', 'none')
+ }
+ $('.grid-add-row').click(function () {
+ if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length >=10) {
+ $('.grid-row-open').find('.grid-duplicate-row').css('display', 'none')
+ $('[data-fieldname="torque_table_columns"]').find('.grid-add-row').css('display', 'none')
+ }
+ });
+ $('[data-fieldname="torque_table_columns"]').find('.grid-row').find('.data-row').find('.row-index').click(function () {
+
+ if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length ==10) {
+ $('.grid-row-open').find('.grid-duplicate-row').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row-below').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row').css('display', 'none')
+ }
+ $('[data-fieldname="torque_table_columns"]').find('.grid-row-open').find('.grid-duplicate-row').click(function () {
+ frm.save()
+ })
+ })
+ $('[data-fieldname="torque_table_columns"]').find('.grid-row').find('.data-row').find('.btn-open-row').click(function () {
+
+ if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length ==10) {
+ $('.grid-row-open').find('.grid-duplicate-row').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row-below').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row').css('display', 'none')
+ }
+ $('[data-fieldname="torque_table_columns"]').find('.grid-row-open').find('.grid-duplicate-row').click(function () {
+ frm.save()
+ })
+
+ })
+
+
+
+
+ },
+ onload: function (frm) {
+ // set_display_order(frm)
+ if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length==10) {
+ $('[data-fieldname="torque_table_columns"]').find('.grid-add-row').css('display', 'none')
+ }
+ },
+ after_save:function(frm){
+        if (frm.doc.torque_table_columns && frm.doc.torque_table_columns.length==10) {
+ $('.grid-row-open').find('.grid-duplicate-row').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row-below').css('display', 'none')
+ $('.grid-row-open').find('.grid-insert-row').css('display', 'none')
+ }
+
+ }
+});
diff --git a/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.json b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.json
new file mode 100644
index 0000000..bf799a7
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.json
@@ -0,0 +1,66 @@
+{
+ "actions": [],
+ "allow_rename": 1,
+ "autoname": "format:{variant}",
+ "creation": "2023-12-12 14:44:55.388191",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "variant",
+ "vehicle",
+ "section_break_bytf4",
+ "torque_table_columns"
+ ],
+ "fields": [
+ {
+ "fieldname": "variant",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Variant",
+ "options": "Variant Mapping",
+ "reqd": 1
+ },
+ {
+ "fetch_from": "variant.vehicle",
+ "fieldname": "vehicle",
+ "fieldtype": "Data",
+ "in_list_view": 1,
+ "label": "Vehicle",
+ "reqd": 1
+ },
+ {
+ "fieldname": "section_break_bytf4",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "torque_table_columns",
+ "fieldtype": "Table",
+ "label": "Torque Table Columns",
+ "options": "Torque Column Mapping"
+ }
+ ],
+ "index_web_pages_for_search": 1,
+ "links": [],
+ "modified": "2023-12-12 15:05:37.251699",
+ "modified_by": "Administrator",
+ "module": "Phase-2",
+ "name": "Torque Manual Mapping",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC"
+}
\ No newline at end of file
diff --git a/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.py b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.py
new file mode 100644
index 0000000..e2d6145
--- /dev/null
+++ b/smart_service/phase_2/doctype/torque_manual_mapping/torque_manual_mapping.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2023, Hard n Soft Technologies Pvt Ltd and contributors
+# For license information, please see license.txt
+
+# import frappe
+from frappe.model.document import Document
+
+class TorqueManualMapping(Document):
+ pass
diff --git a/smart_service/phase_2/doctype/torque_table_column/torque_table_column.json b/smart_service/phase_2/doctype/torque_table_column/torque_table_column.json
index c29ee0d..88c9bce 100644
--- a/smart_service/phase_2/doctype/torque_table_column/torque_table_column.json
+++ b/smart_service/phase_2/doctype/torque_table_column/torque_table_column.json
@@ -1,6 +1,7 @@
{
"actions": [],
"allow_rename": 1,
+ "autoname": "format:{variant}",
"creation": "2023-12-08 14:40:14.464142",
"doctype": "DocType",
"editable_grid": 1,
@@ -41,7 +42,7 @@
],
"index_web_pages_for_search": 1,
"links": [],
- "modified": "2023-12-08 15:09:13.185244",
+ "modified": "2023-12-12 12:09:57.530576",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Torque Table Column",
diff --git a/smart_service/phase_2/doctype/torque_table_column/torque_table_column.py b/smart_service/phase_2/doctype/torque_table_column/torque_table_column.py
index 5b2d8f6..22b5983 100644
--- a/smart_service/phase_2/doctype/torque_table_column/torque_table_column.py
+++ b/smart_service/phase_2/doctype/torque_table_column/torque_table_column.py
@@ -6,3 +6,12 @@ from frappe.model.document import Document
class TorqueTableColumn(Document):
pass
+
+
+# def generate_col_seq(col_data):
+# try:
+
+
+
+# except Exception as e:
+# return str(e)
\ No newline at end of file
diff --git a/smart_service/phase_2/doctype/training_information/training_information.js b/smart_service/phase_2/doctype/training_information/training_information.js
index d2bce23..37270a6 100644
--- a/smart_service/phase_2/doctype/training_information/training_information.js
+++ b/smart_service/phase_2/doctype/training_information/training_information.js
@@ -21,25 +21,47 @@ frappe.ui.form.on('Training Information', {
cur_frm.refresh_fields("display_order")
},
after_save: function (frm) {
- // if (frm.doc.content) {
- // frappe.call({
- // method: "smart_service.phase_2.doctype.training_information.training_information.file_validation_child",
- // args: {
- // name: frm.doc.name,
- // value:"content"
- // },
- // callback: function (r) {
- // console.log(r.message, "----file validation size-------")
- // if (r.message && r.message[0] == false) {
- // if (r.message[1]) {
- // frappe.validated = false;
- // frm.reload_doc()
- // frappe.throw("In training information content " + r.message[1] + " row was invalid file type")
- // }
- // }
- // }
- // })
- // }
+ if (frm.doc.content) {
+ frappe.call({
+ method: "smart_service.phase_2.doctype.training_information.training_information.extract_scrom_package",
+ args: {
+ data: frm.doc.name,
+ vehicle:frm.doc.vehicle
+ },
+ callback: function (r) {
+ debugger
+ console.log(r.message, "----file validation size-------")
+ // if (r.message && r.message[0] == false) {
+ // if (r.message[1]) {
+ // frappe.validated = false;
+ // frm.reload_doc()
+ // frappe.throw("In training information content " + r.message[1] + " row was invalid file type")
+ // }
+ // }
+ }
+ })
+ }
+
+ if (frm.doc.content) {
+ frappe.call({
+ method: "smart_service.phase_2.doctype.training_information.training_information.file_validation_child",
+ args: {
+ name: frm.doc.name,
+ value:"file_url"
+ },
+ callback: function (r) {
+ console.log(r.message, "----file validation size-------")
+ if (r.message && r.message[0] == false) {
+ if (r.message[1]) {
+ frappe.validated = false;
+ frm.reload_doc()
+ console.log(frm.doc.content,"content")
+ frappe.throw("In Training Information content " + r.message[1] + " row was invalid file type")
+ }
+ }
+ }
+ })
+ }
}
});
function set_display_order(frm){
diff --git a/smart_service/phase_2/doctype/training_information/training_information.py b/smart_service/phase_2/doctype/training_information/training_information.py
index 5e7e38b..20d4840 100644
--- a/smart_service/phase_2/doctype/training_information/training_information.py
+++ b/smart_service/phase_2/doctype/training_information/training_information.py
@@ -3,7 +3,14 @@
import frappe
from frappe.model.document import Document
+from frappe.utils import cstr
+import zipfile
+import os
+from smart_service.apis.utils import check_png_ext, check_img_ext, check_zip_ext, check_pdf_ext, details_of_image, checking_image,check_mp4_ext,check_ppt_ext,get_file_size
+site_name = cstr(frappe.local.site)
+BASE_URL = os.path.expanduser(
+ "~") + "/frappe-bench/sites/" + site_name + "/public"
class TrainingInformation(Document):
def validate(self):
try:
@@ -23,6 +30,7 @@ class TrainingInformation(Document):
self.my_id = count[0][0] + 1
else:
self.my_id = 1
+ # extract_scrom_package(self.name,self.vehicle)
except Exception as e:
raise e
@@ -89,3 +97,136 @@ class TrainingInformation(Document):
# d['image_video'], 'Training Information', value, name, 1)
# frappe.delete_doc('Training Information Content Reference', d['name'])
# return (ret, d['idx'])
+
+@frappe.whitelist()
+def extract_scrom_package(data,vehicle):
+ training_information = training_information_data(data)
+ frappe.log_error(str(training_information))
+ BASE_URL1 = os.path.expanduser(
+ "~") + "/frappe-bench/sites/" + site_name + "/public/files"
+ base_path = BASE_URL1 + "/" + "training" +"/"+ vehicle
+ scrom_path = base_path +"/"+ "scrom/"
+ scrom_file_url= ""
+ for d in training_information['data']:
+ if d['file_url']:
+ # for ch in d['content']:
+ if d['topic'] == 'SCROM' and d['file_url']!=None:
+ scrom_folder_path =scrom_path+d['file_url'].split("/")[2].split(".")[0]+"/"
+ file_name = d['file_name']
+ name = d['name']
+ if not os.path.exists(scrom_folder_path):
+ os.makedirs(scrom_folder_path)
+ if d['file_url']:
+ frappe.log_error(str(scrom_folder_path))
+ with zipfile.ZipFile(BASE_URL+d['file_url'], 'r') as zip_ref:
+ zip_ref.extractall(path=scrom_folder_path)
+ base_path_url = "/files/training/"+vehicle+"/scrom/"+ d['file_url'].split("/")[2].split(".")[0]+"/"
+ file_path_url="/files/training/"+vehicle+"/scrom/"+ d['file_url'].split("/")[2].split(".")[0]+"/"
+ # for file in os.listdir(scrom_folder_path):
+ # d = os.path.join(scrom_folder_path, file)
+ # if os.path.isdir(d):
+ # base_folder_name = d.split('/')[-1]
+ for root, dirs, files in os.walk(scrom_folder_path):
+ if file_name in files:
+ base_folder_name = os.path.join(root, file_name)
+ scrom_file_url = base_folder_name.split("/public")[1:][0]
+
+ # return os.path.join(root, file_name)
+
+
+ frappe.log_error('scrom_file_url'+str(scrom_file_url))
+ update_base_path = frappe.db.sql(f"""update `tabTraining Information Content Reference` set base_path = '{base_path_url}', scrom_file_url ='{scrom_file_url}' where name='{name}'""")
+ return {"success":1,"data":data}
+
+
+ # except Exception as e:
+ # return "Extraction Failed"+str(e)
+
+
+def training_information_data(parent=None):
+ try:
+
+ # training_information_details = frappe.db.sql(f"""select name,vehicle,category,language,display_order,keywords,my_id,active_status from
+ # `tabTraining Information` where name='{parent}' """, as_dict=1)
+
+ # for d in training_information_details:
+ training_information_details = frappe.db.sql(f"""select idx as 'display_order',name,topic,scrom_file_url,base_path,file_url,file_name from `tabTraining Information Content Reference`
+ where parent = '{parent}'
+ order by display_order ;""", as_dict=1)
+
+
+ return {"success" :1 ,"data":training_information_details}
+
+ except Exception as e:
+
+ return {"success" :0 ,"data":None,"msg":str(e)}
+
+
+@frappe.whitelist()
+def file_validation_child(name, value=None):
+ try:
+ if name:
+ child_data = frappe.db.sql(
+ """select * from `tabTraining Information Content Reference` where parent='%s';""" % (name), as_dict=1)
+ for d in child_data:
+ if d['topic'] and d['file_url']:
+ res = False
+ res1 = False
+ if d['topic'] =="PDF":
+ res = check_pdf_ext(d['file_url'])
+
+ if d['topic'] =="PPT":
+ res = check_ppt_ext(d['file_url'])
+
+ if d['topic'] =="Video":
+ res = check_mp4_ext(d['file_url'])
+
+ if d['topic'] =="Image":
+ res = check_img_ext(d['file_url'])
+ res1 = check_png_ext(d['file_url'])
+
+ if d['topic'] =="SCROM":
+ res = check_zip_ext(d['file_url'])
+
+ if res==True or res1==True:
+ valuess,idx = get_file_details(d['topic'],d['file_url'],value,name,1,d['idx'],d['name'])
+ else:
+ valuess,idx = delete_child_image(d['file_url'],value,name,1,d['idx'],d['name'])
+ if valuess==False:
+ return valuess,idx
+
+ return child_data
+ except Exception as e:
+ frappe.log_error("file validation child",frappe.get_traceback())
+
+
+
+
+def get_file_details(topic,file_url,value,name,is_child,idx,child_doc):
+ if topic=="Image":
+ width, height, size = details_of_image(file_url)
+ if width > 1280 or height > 720 or size > 1*1024 and file_url:
+ delete_child_image(file_url,value,name,is_child,idx,child_doc)
+ else:
+ return True,idx
+ else:
+ size = get_file_size(file_url)
+ frappe.log_error("file_size",str(size))
+ if size>500 and file_url:
+ valuess,idx = delete_child_image(file_url,value,name,is_child,idx,child_doc)
+ return valuess,idx
+ else:
+ return True,idx
+
+def delete_child_image(file_url,value,name,is_child,idx,child_doc):
+ try:
+ val = checking_image(
+ file_url, "Training Information", value, name, is_child)
+ doc = frappe.get_doc('Training Information Content Reference', child_doc)
+ doc.file_url = ''
+ doc.save()
+ if val == True:
+ return False, idx
+
+ except Exception as e:
+ frappe.log_error("delete_child_image",frappe.get_traceback())
diff --git a/smart_service/phase_2/doctype/training_information_content_reference/training_information_content_reference.json b/smart_service/phase_2/doctype/training_information_content_reference/training_information_content_reference.json
index 93f26c9..a69b4e2 100644
--- a/smart_service/phase_2/doctype/training_information_content_reference/training_information_content_reference.json
+++ b/smart_service/phase_2/doctype/training_information_content_reference/training_information_content_reference.json
@@ -7,17 +7,13 @@
"engine": "InnoDB",
"field_order": [
"topic",
- "content",
+ "file_url",
"base_path",
"file_name",
- "file_url"
+ "scrom_file_url",
+ "scrom_index_page"
],
"fields": [
- {
- "fieldname": "content",
- "fieldtype": "Attach",
- "label": "Content"
- },
{
"fieldname": "topic",
"fieldtype": "Select",
@@ -28,6 +24,7 @@
{
"fieldname": "base_path",
"fieldtype": "Data",
+ "hidden": 1,
"in_list_view": 1,
"label": "Base Path"
},
@@ -39,15 +36,26 @@
},
{
"fieldname": "file_url",
+ "fieldtype": "Attach",
+ "label": "File Url"
+ },
+ {
+ "fieldname": "scrom_file_url",
+ "fieldtype": "Data",
+ "hidden": 1,
+ "label": "Scrom File Url"
+ },
+ {
+ "fieldname": "scrom_index_page",
"fieldtype": "Data",
"in_list_view": 1,
- "label": "File Url"
+ "label": "Scrom Index Page"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
- "modified": "2023-12-08 17:19:25.723894",
+ "modified": "2023-12-12 13:48:55.828272",
"modified_by": "Administrator",
"module": "Phase-2",
"name": "Training Information Content Reference",
diff --git a/smart_service/transactions/doctype/publish/publish.js b/smart_service/transactions/doctype/publish/publish.js
index b0b2c6c..8205f25 100644
--- a/smart_service/transactions/doctype/publish/publish.js
+++ b/smart_service/transactions/doctype/publish/publish.js
@@ -377,7 +377,7 @@ frappe.ui.form.on("Publish", {
module_name: frm.doc.publish_module,
},
callback: function (r) {
-
+
window.location.href =
r.message.url +
"/app/publish" +
@@ -492,9 +492,9 @@ frappe.ui.form.on("Publish", {
},
callback: function (r) {
if (r.message) {
- debugger
+
if (r.message == frm.doc.version) {
- debugger
+
frm.add_custom_button(__("Global Publish"), function () {
frappe.call({
method:
@@ -1213,6 +1213,13 @@ frappe.ui.form.on("Publish", {
frappe.msgprint("Variant Mapping already added");
}
+ if (dulicate === false && frm.doc.publish_module == "Torque Manual") {
+ let child = cur_frm.add_child("publish_documents");
+ child.variant_mapping = frm.doc.variant_mapping;
+ child.item_category = "Variant Mapping";
+ refresh_field("publish_documents");
+ frappe.msgprint(__(frm.doc.variant_mapping + " Added"));
+ }
},
km_report: function (frm, cdt, cdn) {
if (frm.doc.publish_module == "Automotive System") {
@@ -1754,20 +1761,22 @@ function add_training_information(frm) {
if (frm.doc.training_information_publish_docs) {
frm.doc.training_information_publish_docs.forEach(
(publishrow) => {
- if (row.parent_name == publishrow.parent_name) {
+ if (row.parent1 == publishrow.parent1) {
dulicate = true;
}
}
);
}
if (dulicate === false) {
- let child = cur_frm.add_child("training_information_publish_docs");
+ let child = cur_frm.add_child(
+ "training_information_publish_docs"
+ );
child.vehicle = frm.doc.vehicle;
child.parent1 = row.parent1;
child.language = row.language;
child.category = row.category;
child.active_status = row.active_status;
- refresh_field("training_information_publish_docs");
+ // refresh_field("training_information_publish_docs");
added += 1;
frm.set_df_property(
"training_information_publish_docs",
@@ -1785,8 +1794,9 @@ function add_training_information(frm) {
"btn btn-xs btn-secondary grid-add-row"
)[5]
.setAttribute("style", "display:none;");
- } else {
- frappe.msgprint(__(row.parent_name + "Alraeady Added"));
+ }
+ else {
+ frappe.msgprint(__(row.parent1 + " Already Added"));
}
}
});
diff --git a/smart_service/transactions/doctype/publish/publish.json b/smart_service/transactions/doctype/publish/publish.json
index 1e9d9b9..fa6c62a 100644
--- a/smart_service/transactions/doctype/publish/publish.json
+++ b/smart_service/transactions/doctype/publish/publish.json
@@ -209,7 +209,7 @@
"read_only": 1
},
{
- "depends_on": "eval:doc.variant_mapping && doc.publish_type == \"Internal\" && doc.docstatus == 0 && doc.publish_module=='Automotive System';",
+ "depends_on": "eval:doc.variant_mapping && doc.publish_type == \"Internal\" && doc.docstatus == 0 && doc.publish_module=='Automotive System' || doc.publish_module=='Torque Manual'",
"fieldname": "add_variant_mapping_to_publish",
"fieldtype": "Button",
"label": "Add Variant Mapping to Publish",
@@ -440,7 +440,7 @@
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
- "modified": "2023-12-06 15:38:46.908708",
+ "modified": "2023-12-12 23:25:19.477106",
"modified_by": "Administrator",
"module": "Transactions",
"name": "Publish",
diff --git a/smart_service/transactions/doctype/publish/publish.py b/smart_service/transactions/doctype/publish/publish.py
index cd8b03c..35117e3 100644
--- a/smart_service/transactions/doctype/publish/publish.py
+++ b/smart_service/transactions/doctype/publish/publish.py
@@ -1,5 +1,6 @@
# Copyright (c) 2021, Hard n Soft Technologies Pvt Ltd and contributors
# For license information, please see license.txt
+import zipfile
from deepdiff import DeepDiff
import pandas as pd
import copy
@@ -19,7 +20,8 @@ site_name = cstr(frappe.local.site)
base_url = os.path.expanduser(
"~") + "/frappe-bench/sites/" + site_name + "/public" + "/files" + "/json_files" + "/phase2"
-
+BASE_URL_PUBLIC = os.path.expanduser(
+ "~") + "/frappe-bench/sites/" + site_name + "/public"
frappe.utils.logger.set_log_level("DEBUG")
success_reponse = {"status": 1, "data": "", "message": ""}
failure_reponse = {"status": 0, "data": "", "error": ""}
@@ -59,7 +61,7 @@ class Publish(Document):
str(km_mapping.name) + '\n'
def on_submit(self):
-
+
# qwik_service
if self.docstatus == 1 and self.publish_status == 'To Publish' and self.publish_module == 'Qwik Service' and self.publish_type == 'Internal':
qwik_service_publish(self.vehicle, self.vehicle_id, self.variant,
@@ -127,49 +129,50 @@ class Publish(Document):
update_special_tool_publish_docs(self)
update_publish_status = frappe.db.sql(
"""update `tabPublish` set publish_status='Published' where name='{0}'""".format(self.name))
-
+
# torque_manual
if self.docstatus == 1 and self.publish_status == 'To Publish' and self.publish_module == 'Torque Manual' and self.publish_type == 'Internal' and self.torque_manual_publish_docs:
torque_manual_publish(self.vehicle, self.vehicle_id,
- self.language, self.publish_type,
- self.release_description, self.torque_manual_publish_docs, self.variant)
+ self.language, self.publish_type,
+ self.release_description, self.torque_manual_publish_docs, self.variant)
update_publish_status = frappe.db.sql(
"""update `tabPublish` set publish_status='Published' where name='{0}'""".format(self.name))
if self.docstatus == 1 and self.publish_module == 'Torque Manual' and self.publish_type == 'Global':
torque_manual_publish(self.vehicle, self.vehicle_id,
- self.language, self.publish_type,
- self.release_description, self.torque_manual_publish_docs,
- self.variant)
+ self.language, self.publish_type,
+ self.release_description, self.torque_manual_publish_docs,
+ self.variant)
update_torque_manual_published_docs(self)
update_publish_status = frappe.db.sql(
"""update `tabPublish` set publish_status='Published' where name='{0}'""".format(self.name))
# training information
if self.docstatus == 1 and self.publish_status == 'To Publish' and self.publish_module == 'Training Information' and self.publish_type == 'Internal':
training_information_publish(self.vehicle, self.vehicle_id,
- self.language, self.publish_type,
- self.release_description,self.variant,
- self.training_information_publish_docs)
+ self.language, self.publish_type,
+ self.release_description, self.variant,
+ self.training_information_publish_docs)
update_publish_status = frappe.db.sql(
"""update `tabPublish` set publish_status='Published' where name='{0}'""".format(self.name))
if self.docstatus == 1 and self.publish_status == 'To Publish' and self.publish_module == 'Training Information' and self.publish_type == 'Global' and self.training_information_publish_docs:
training_information_publish(self.vehicle, self.vehicle_id,
- self.language, self.publish_type,
- self.release_description,self.variant,
- self.training_information_publish_docs)
+ self.language, self.publish_type,
+ self.release_description, self.variant,
+ self.training_information_publish_docs)
update_training_published_docs(self)
update_publish_status = frappe.db.sql(
"""update `tabPublish` set publish_status='Published' where name='{0}'""".format(self.name))
variant = self.variant_mapping_details.split('/n')
- update_publish_mapping(self.vehicle, self.variant_mapping,
- self.language, self.publish_module, self.publish_type)
+
+ update_publish_mapping(self.vehicle, i,
+ self.language, self.publish_module, self.publish_type)
if self.publish_type == 'Internal':
frappe.db.sql(
'''update `tabVehicle` set internal_publish = 1 where vehicle= '{0}'; '''.format(self.vehicle))
-
+
frappe.db.sql(
'''update `tabCustom Languages` set internal_publish = 1 where lang_code= '{0}'; '''.format(self.language))
@@ -247,18 +250,20 @@ def update_qwik_published_docs(self):
except Exception as ex:
return str(ex)
-
+
+
def update_torque_manual_published_docs(self):
try:
for d in self.torque_manual_publish_docs:
- frappe.db.sql(''' update `tabTorque Manual` set is_published=1 where name='{0}';'''.format(d.parent1))
+ frappe.db.sql(
+ ''' update `tabTorque Manual` set is_published=1 where name='{0}';'''.format(d.parent1))
frappe.db.commit()
except Exception as e:
raise e
def update_publish_mapping(vehicle, variant, language, module, publish_type):
- # frappe.set_user('Administrator')
+
try:
if module == 'Repair service':
pub_data = frappe.db.get_list('Module Publish Mapping', filters={
@@ -271,10 +276,10 @@ def update_publish_mapping(vehicle, variant, language, module, publish_type):
for d in pub_data:
if d['name']:
frappe.db.sql(
- f"""UPDATE `tabModule Publish Mapping` set repairservice_check_sheet='1',publish_type='{publish_type}' where name ='{d['name']}'""")
+ f"""UPDATE `tabModule Publish Mapping` set repairservice_check_sheet='1',publish_type='{publish_type}' where name ='{d['name']}'""",as_dict=1)
frappe.db.commit()
-
+
elif module == 'Training Information':
pub_data = frappe.db.get_list('Module Publish Mapping', filters={
"vehicle": vehicle,
@@ -286,7 +291,7 @@ def update_publish_mapping(vehicle, variant, language, module, publish_type):
for d in pub_data:
if d['name']:
frappe.db.sql(
- f"""UPDATE `tabModule Publish Mapping` set training_information='1',publish_type='{publish_type}' where name ='{d['name']}'""")
+ f"""UPDATE `tabModule Publish Mapping` set training_information='1',publish_type='{publish_type}' where name ='{d['name']}' """,as_dict=1)
frappe.db.commit()
@@ -333,6 +338,9 @@ def update_publish_mapping(vehicle, variant, language, module, publish_type):
elif module == 'Qwik Service':
doc.qwik_service = 1
+ elif module == 'Torque Manual':
+ doc.torque_information_nm = 1
+
doc.save()
except Exception as e:
frappe.log_error("update_publish_mapping", str(e))
@@ -434,18 +442,20 @@ def global_publish(doc):
def generate_global_publish(name, module_name):
try:
res = frappe.get_doc("Publish", name)
+ # frappe.log_error('res'+str(res.variant_mapping))
if module_name == 'Feature Finder':
ret = frappe.get_doc({
"doctype": "Publish",
"vehicle": res.vehicle,
"publish_type": "Global",
"language": res.language,
- "variant_mapping_details": res.variant_mapping_details,
+ "variant_mapping_details": res.variant_mapping,
"publish_status": 'To Publish',
"release_description": res.release_description,
"publish_module": res.publish_module,
"feature_finder_publish_docs": res.feature_finder_publish_docs
})
+ ret.save()
elif module_name == 'Repair service':
ret = frappe.get_doc({
"doctype": "Publish",
@@ -453,11 +463,13 @@ def generate_global_publish(name, module_name):
"publish_type": "Global",
"language": res.language,
"variant_mapping_details": res.variant_mapping,
+
"publish_status": 'To Publish',
"release_description": res.release_description,
"publish_module": res.publish_module,
"repiar_checksheet_publish_docs": res.repiar_checksheet_publish_docs
})
+ ret.save()
elif module_name == 'Qwik Service':
ret = frappe.get_doc({
"doctype": "Publish",
@@ -470,6 +482,7 @@ def generate_global_publish(name, module_name):
"publish_module": res.publish_module
})
+ ret.save()
elif module_name == 'Special Tool':
ret = frappe.get_doc({
"doctype": "Publish",
@@ -481,18 +494,23 @@ def generate_global_publish(name, module_name):
"publish_module": res.publish_module
})
+ ret.save()
elif module_name == 'Torque Manual':
+ # frappe.log_error(str(res))
ret = frappe.get_doc({
"doctype": "Publish",
"vehicle": res.vehicle,
"publish_type": "Global",
"language": res.language,
- "variant_mapping_details": res.variant_mapping_details,
+ "variant_mapping_details": res.variant_mapping,
"publish_status": 'To Publish',
+ "variant_mapping": res.variant_mapping,
"release_description": res.release_description,
"publish_module": res.publish_module,
"torque_manual_publish_docs": res.torque_manual_publish_docs
- })
+ })
+ ret.save()
+ # frappe.log_error('ret'+str(ret))
elif module_name == 'Training Information':
ret = frappe.get_doc({
"doctype": "Publish",
@@ -687,7 +705,8 @@ def update_feature_published_docs(self):
def update_repair_published_docs(self):
try:
- for d in self.repair_checksheet_publish:
+ # frappe.log_error(str(self))
+ for d in self.repiar_checksheet_publish_docs:
frappe.db.sql(
"""UPDATE `tabRepair Service Mapping` set published=1 where name='{0}'""".format(d.parent_name))
frappe.db.commit()
@@ -695,7 +714,6 @@ def update_repair_published_docs(self):
except Exception as e:
frappe.throw(str(e))
-
def update_special_tool_publish_docs(self):
try:
for d in self.special_tool_publish_docs:
@@ -705,6 +723,7 @@ def update_special_tool_publish_docs(self):
except Exception as e:
frappe.throw(str(e))
+
def update_training_published_docs(self):
try:
for d in self.training_information_publish:
@@ -715,6 +734,7 @@ def update_training_published_docs(self):
except Exception as e:
frappe.throw(str(e))
+
def create_publish_folders(folder_url):
try:
if not os.path.isdir(folder_url + "/" + "Global"):
@@ -722,7 +742,7 @@ def create_publish_folders(folder_url):
if not os.path.isdir(folder_url + "/" + "Internal"):
os.makedirs(folder_url + "/" + "Internal")
-
+
if not os.path.isdir(folder_url + "/" + "Internal_Full_Update"):
os.makedirs(folder_url + "/" + "Internal_Full_Update")
@@ -736,6 +756,8 @@ def create_publish_folders(folder_url):
'''New Module Publish Section'''
+
+
def create_df(data_set):
feature_finder_tmp = json.dumps(data_set)
df = pd.DataFrame(json.loads(feature_finder_tmp))
@@ -749,6 +771,7 @@ def create_df(data_set):
res1[key] = value.to_dict('records')
return res1
+
def repair_checksheet_publish(vehicle, vehicle_id,
language, publish_type, release_description,
variant, parent):
@@ -764,12 +787,17 @@ def repair_checksheet_publish(vehicle, vehicle_id,
'''Publish Ready Flags'''
publish_repair_checksheet = 0
repair_checksheet_tmp = []
+ repair_checksheet_tmp1 = []
'''Create Folder For Publish'''
create_publish_folders(folder_url)
file_path = folder_url + "/" + publish_type + "/" + \
vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json'
-
+ # full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
+ global_file_path = folder_url + "/" + 'Global' + "/" + \
+ vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json'
+ check_inter_file = folder_url + "/" + 'Internal' + "/" + \
+ vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json'
'''Append Published Data to Json'''
logger.info(
f'Repair Checksheet Data Append Start::{vehicle}-{language}-{publish_type}')
@@ -783,12 +811,27 @@ def repair_checksheet_publish(vehicle, vehicle_id,
}
'''update existing global json file'''
- if os.path.isfile(file_path):
- with open(file_path) as f:
+ if os.path.isfile(global_file_path) and publish_type == 'Internal':
+ if os.path.isfile(global_file_path):
+ with open(global_file_path) as f:
+ published_data = json.load(f)
+ for i in parent:
+ repair_checksheet = repair_checksheet_data(vehicle, language,
+ publish_type, i.parent_name)
+ if repair_checksheet['status'] == 1:
+ publish_repair_checksheet = 1
+ repair_checksheet_tmp.append(
+ repair_checksheet['data'][0])
+
+ repair_checksheet_tmp = get_latest_data({'data': published_data['data']}, {
+ 'data': repair_checksheet_tmp})
+
+ elif os.path.isfile(global_file_path) and publish_type == 'Global':
+ with open(global_file_path) as f:
published_data = json.load(f)
for i in parent:
repair_checksheet = repair_checksheet_data(vehicle, language,
- publish_type, i.parent_name)
+ publish_type, i.parent_name)
if repair_checksheet['status'] == 1:
publish_repair_checksheet = 1
repair_checksheet_tmp.append(
@@ -797,7 +840,23 @@ def repair_checksheet_publish(vehicle, vehicle_id,
repair_checksheet_tmp = get_latest_data({'data': published_data['data']}, {
'data': repair_checksheet_tmp})
+ if os.path.isfile(check_inter_file):
+ # frappe.log_error(str('internal aleady exists'))
+ with open(check_inter_file) as f:
+ published_data = json.load(f)
+ for i in parent:
+ repair_checksheet = repair_checksheet_data(vehicle, language,
+ publish_type, i.parent_name)
+ if repair_checksheet['status'] == 1:
+ publish_repair_checksheet = 1
+ repair_checksheet_tmp1.append(
+ repair_checksheet['data'][0])
+ # frappe.log_error('before' + str(repair_checksheet_tmp1))
+ repair_checksheet_tmp = get_latest_data({'data': published_data['data']}, {
+ 'data': repair_checksheet_tmp1})
+
else:
+ # frappe.log_error(str('new internal publish'))
for i in parent:
repair_checksheet = repair_checksheet_data(
vehicle, language, publish_type, i.parent_name)
@@ -808,17 +867,27 @@ def repair_checksheet_publish(vehicle, vehicle_id,
repair_checksheet_tmp.append(
repair_checksheet['data'][0])
# frappe.msgprint(str(repair_checksheet['data'][0]))
- repair_checksheet_tmp = get_latest_data(
- {'data': repair_checksheet_tmp}, {'data': []})
+ repair_checksheet_tmp = get_latest_data(
+ {'data': repair_checksheet_tmp}, {'data': []})
+ # frappe.log_error('new pub data' +str(repair_checksheet_tmp))
+ if publish_type == "Global":
+ #Remove Internal File On Global Publish
+ internal_check = folder_url + "/" + 'Internal' + "/" + \
+ vehicle.replace(' ', '-') + '-repair_check_sheet' + '.json'
+ if os.path.isfile(internal_check):
+ os.remove(internal_check)
if publish_repair_checksheet == 1:
+
""" Save publish file """
vehicle_data['data'] = repair_checksheet_tmp
+ # frappe.log_error(str(file_path))
with open(file_path, 'w') as outfile:
outfile.write(json.dumps(vehicle_data, indent=4, default=str))
return 1, file_path
+
except Exception as e:
logger.info(
@@ -865,7 +934,6 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
vehicle.replace(' ', '-') + '-feature_finder' + '.json'
full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
- # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
global_full_update_path = folder_url + "/" + "Global_Full_Update"
check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
@@ -897,11 +965,10 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, feature_finder_tmp)
elif os.path.isfile(file_path) and publish_type == 'Global':
- frappe.log_error('inside')
+ # frappe.log_error('inside')
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
with open(check_glbl_full_updte) as f:
@@ -932,7 +999,7 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
with open(check_full_update_file) as f:
published_data = json.load(f)
for i in parent:
- frappe.log_error("i",str(parent))
+ # frappe.log_error("i",str(parent))
feature_finder = feature_finder_data(
vehicle, language, publish_type, i.parent1, i.variant)
if feature_finder['status'] == 1:
@@ -946,7 +1013,7 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
feature_finder_tmp = create_df(feature_finder_tmp)
latest_pub_data['data'] = feature_finder_tmp
- vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
+ vehi_data = compare_get_data({'data': published_data}, {
'data': feature_finder_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@@ -960,7 +1027,7 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
#First Time Global Or Internal
else:
for i in parent:
- frappe.log_error("619",str(parent))
+ # frappe.log_error("619",str(parent))
feature_finder = feature_finder_data(
vehicle, language, publish_type, i.parent1, i.variant)
@@ -973,7 +1040,7 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
frappe.throw('failed to publish')
feature_finder_tmp = get_latest_data(
{'data': feature_finder_tmp}, {'data': []})
- frappe.log_error(str(feature_finder_tmp))
+ # frappe.log_error(str(feature_finder_tmp))
vehi_data = create_df(feature_finder_tmp)
latest_pub_data['data'] = vehi_data
@@ -981,7 +1048,7 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
+ # frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
""" Save publish file """
@@ -1007,10 +1074,10 @@ def feature_finder_publish(vehicle=None, vehicle_id=None,
frappe.throw('Failed To Publish')
-def qwik_service_publish(vehicle=None, vehicle_id=None,
+def qwik_service_publish(vehicle=None, vehicle_id=None,variant=None,
language=None, publish_type=None,
- release_description=None, parent=None,
- variant=None):
+ release_description=None, parent=None
+ ):
try:
logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}'
logger = frappe.logger(logger_file,
@@ -1045,10 +1112,10 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
global_file_path = folder_url + "/" + 'Global' + "/" + \
vehicle.replace(' ', '-') + '- qwik_service' + '.json'
- full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json'
- # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
+ full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
+
global_full_update_path = folder_url + "/" + "Global_Full_Update"
- check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '- qwik_service_full_update' + '.json'
+ check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
'''Append Published Data to Json'''
@@ -1078,11 +1145,11 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, qwik_service_tmp)
elif os.path.isfile(file_path) and publish_type == 'Global':
- frappe.log_error('inside')
+ # frappe.log_error('inside')
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
+ # frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
with open(check_glbl_full_updte) as f:
@@ -1096,7 +1163,7 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
qwik_service['data'][0])
qwik_service_tmp = create_df(qwik_service_tmp)
latest_pub_data['data'] = qwik_service_tmp
- vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
+ vehi_data = compare_get_data({'data': published_data['data']}, {
'data': qwik_service_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@@ -1106,10 +1173,11 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, qwik_service_tmp)
else:
#Existing Global or Internal Full Update
-
- internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
+ internal_full_update_check = folder_url + "/" +"Internal_Full_Update"
check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
+ # frappe.log_error(str(check_full_update_file))
if os.path.isfile(check_full_update_file):
+ # frappe.log_error(str('inside internal publish'))
with open(check_full_update_file) as f:
published_data = json.load(f)
for i in parent:
@@ -1127,7 +1195,7 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
qwik_service_tmp = create_df(qwik_service_tmp)
latest_pub_data['data'] = qwik_service_tmp
- vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
+ vehi_data = compare_get_data({'data': published_data['data']}, {
'data': qwik_service_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
@@ -1146,8 +1214,10 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
#First Time Global Or Internal
else:
+
+ # frappe.log_error("211"+str(parent))
for i in parent:
- frappe.log_error("211",str(i))
+
qwik_service = qwik_service_data(
vehicle, language, publish_type, i.parent1, i.variant)
@@ -1160,14 +1230,14 @@ def qwik_service_publish(vehicle=None, vehicle_id=None,
frappe.throw('failed to publish')
qwik_service_tmp = get_latest_data(
{'data': qwik_service_tmp}, {'data': []})
- frappe.log_error(str(qwik_service_tmp))
+ # frappe.log_error(str(qwik_service_tmp))
vehi_data = create_df(qwik_service_tmp)
latest_pub_data['data'] = vehi_data
if publish_type == "Global":
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
#Remove Internal Full Update File On Global Publish
existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-qwik_service_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
+ # frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
""" Save publish file """
@@ -1258,12 +1328,9 @@ def special_tool_publish(vehicle, vehicle_id,
internal_full_update_check = folder_url + "/" +"Internal_Full_Update"
check_inter_full_update_file = internal_full_update_check +"/"+ \
vehicle.replace(' ', '-') + '-special_tool' + '.json'
-
- frappe.log_error(str(check_inter_full_update_file))
-
# check if Internal Full Update Already Exist
if os.path.isfile(check_inter_full_update_file):
- frappe.log_error('inside_full_update')
+ # frappe.log_error('inside_full_update')
with open(check_inter_full_update_file) as f:
published_data = json.load(f)
for i in parent:
@@ -1278,7 +1345,6 @@ def special_tool_publish(vehicle, vehicle_id,
special_tool_tmp = get_latest_data({'data': published_data['data']}, {
'data': special_tool_tmp})
else:
-
for i in parent:
special_tool = special_tool_data(
vehicle, publish_type, i.parent1)
@@ -1314,11 +1380,11 @@ def special_tool_publish(vehicle, vehicle_id,
def torque_manual_publish(vehicle=None, vehicle_id=None,
- language=None, publish_type=None,
- release_description=None, parent=None,
- variant=None):
+ language=None, publish_type=None,
+ release_description=None, parent=None,
+ variant=None):
try:
- logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}'
+ logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant} - torque_publish_start'
logger = frappe.logger(logger_file,
allow_site=True, file_count=100)
logger.info(
@@ -1340,26 +1406,28 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
latest_pub_data = vehicle_data.copy()
'''Publish Ready Flags'''
- publish_torque_manual= 0
+ publish_torque_manual = 0
torque_manual_tmp = []
'''Create Folder For Publish'''
create_publish_folders(folder_url)
- full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
+ full_update_folder_url = folder_url + "/" + \
+ "{}".format(publish_type)+"_Full_Update"
file_path = folder_url + "/" + publish_type + "/" + \
vehicle.replace(' ', '-') + '-torque_manual' + '.json'
global_file_path = folder_url + "/" + 'Global' + "/" + \
vehicle.replace(' ', '-') + '-torque_manual' + '.json'
- full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
- # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
+ full_update_file = full_update_folder_url+"/" + \
+ vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
global_full_update_path = folder_url + "/" + "Global_Full_Update"
- check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
+ check_glbl_full_updte = global_full_update_path+"/" + \
+ vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
'''Append Published Data to Json'''
- logger.info(
- f'Feature Data Append Start::{vehicle}-{language}-{publish_type}')
+ # logger.info(
+ # f'Feature Data Append Start::{vehicle}-{language}-{publish_type}')
'''update existing global json file'''
if os.path.isfile(global_file_path) and publish_type == 'Internal':
if os.path.isfile(check_glbl_full_updte):
@@ -1367,7 +1435,7 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
published_data = json.load(f)
for i in parent:
torque_manual = torque_manual_data(vehicle, language,
- publish_type, i.parent1, i.variant)
+ publish_type, i.parent1, i.variant)
if torque_manual['status'] == 1:
publish_torque_manual = 1
torque_manual_tmp.append(
@@ -1384,18 +1452,20 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, torque_manual_tmp)
elif os.path.isfile(file_path) and publish_type == 'Global':
- frappe.log_error('inside')
+ # frappe.log_error('inside')
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
- #Remove Internal Full Update File On Global Publish
- existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
+ # Remove Internal Full Update File On Global Publish
+ existing_internal_full_update = full_update_folder_url_internal+"/" + \
+ vehicle.replace(' ', '-') + \
+ '-torque_manual_full_update' + '.json'
+ # frappe.log_error('file_path' + str(existing_internal_full_update))
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
with open(check_glbl_full_updte) as f:
published_data = json.load(f)
for i in parent:
torque_manual = torque_manual_data(vehicle, language,
- publish_type, i.parent1, i.variant)
+ publish_type, i.parent1, i.variant)
if torque_manual['status'] == 1:
publish_torque_manual = 1
torque_manual_tmp.append(
@@ -1411,15 +1481,17 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
vehi_data = add_new_val(
vehi_data, new_variant_name, torque_manual_tmp)
else:
- #Existing Global or Internal Full Update
-
- internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
- check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
+ # Existing Global or Internal Full Update
+
+ internal_full_update_check = full_update_folder_url = folder_url + \
+ "/" + "Internal_Full_Update"
+ check_full_update_file = internal_full_update_check+"/" + \
+ vehicle.replace(' ', '-') + \
+ '-torque_manual_full_update' + '.json'
if os.path.isfile(check_full_update_file):
with open(check_full_update_file) as f:
published_data = json.load(f)
for i in parent:
- frappe.log_error("i",str(parent))
torque_manual = torque_manual_data(
vehicle, language, publish_type, i.parent1, i.variant)
if torque_manual['status'] == 1:
@@ -1434,7 +1506,7 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
latest_pub_data['data'] = torque_manual_tmp
vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
- 'data': torque_manual_tmp})
+ 'data': torque_manual_tmp})
if vehi_data:
find_distinct = set(exisitng_var)
new_variant_name = [
@@ -1443,46 +1515,45 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
vehi_data, new_variant_name, torque_manual_tmp)
vehicle_data['data'] = vehi_data
-
- #First Time Global Or Internal
+
+ # First Time Global Or Internal
else:
for i in parent:
- frappe.log_error("619",str(parent))
+
torque_manual = torque_manual_data(
vehicle, language, publish_type, i.parent1, i.variant)
-
+
if torque_manual['status'] == 1:
publish_torque_manual = 1
- if len(torque_manual['data'])>0:
+ if len(torque_manual['data']) > 0:
torque_manual_tmp.append(
torque_manual['data'][0])
else:
frappe.throw('failed to publish')
torque_manual_tmp = get_latest_data(
{'data': torque_manual_tmp}, {'data': []})
- frappe.log_error(str(torque_manual_tmp))
+
vehi_data = create_df(torque_manual_tmp)
latest_pub_data['data'] = vehi_data
if publish_type == "Global":
full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
- #Remove Internal Full Update File On Global Publish
- existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-torque_manual_full_update' + '.json'
- frappe.log_error('file_path' + str(existing_internal_full_update))
+ # Remove Internal Full Update File On Global Publish
+ existing_internal_full_update = full_update_folder_url_internal+"/" + \
+ vehicle.replace(' ', '-') + \
+ '-torque_manual_full_update' + '.json'
+
if os.path.isfile(existing_internal_full_update):
os.remove(existing_internal_full_update)
""" Save publish file """
vehicle_data['data'] = vehi_data
-
+
with open(file_path, 'w') as outfile:
outfile.write(json.dumps(latest_pub_data, indent=4, default=str))
-
with open(full_update_file, 'w') as outfile:
outfile.write(json.dumps(vehicle_data, indent=4, default=str))
-
-
-
+
logger.info(
f'Torque Manual Data Append Start::{vehicle}-{language}-{publish_type}')
@@ -1493,191 +1564,9 @@ def torque_manual_publish(vehicle=None, vehicle_id=None,
f'{vehicle} - {language} - {publish_type} - {module_name} error in json creation' + str(e))
frappe.throw('Failed To Publish')
-# def training_information_publish(vehicle=None, vehicle_id=None,
-# language=None, publish_type=None,
-# release_description=None,variant=None,parent=None):
-
-# try:
-# logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant}'
-# logger = frappe.logger(logger_file,
-# allow_site=True, file_count=100)
-# logger.info(
-# f"start of fetching Training Information data - {vehicle} - {language} - {module_name} - {variant}")
-
-# folder_url = base_url+"/"+vehicle.replace(' ', '-') + "/"+language
-# logger.info(f'Created Folder-{vehicle}-{language}-{publish_type}')
-
-# '''Final Json'''
-# vehicle_data = {
-# 'vehicle': vehicle,
-# 'vehicle_myid': vehicle_id,
-# 'publish_type': publish_type,
-# 'publish_description': release_description,
-# 'publish_language': language,
-# 'data': ''
-# }
-
-# latest_pub_data = vehicle_data.copy()
-
-# '''Publish Ready Flags'''
-# publish_training_information = 0
-# training_information_tmp = []
-
-# '''Create Folder For Publish'''
-
-# create_publish_folders(folder_url)
-# full_update_folder_url = folder_url + "/" + "{}".format(publish_type)+"_Full_Update"
-# file_path = folder_url + "/" + publish_type + "/" + \
-# vehicle.replace(' ', '-') + '-training_information' + '.json'
-# global_file_path = folder_url + "/" + 'Global' + "/" + \
-# vehicle.replace(' ', '-') + '-training_information' + '.json'
-
-# full_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
-# # full_internal_update_file = full_update_folder_url+"/" + vehicle.replace(' ', '-') + '-feature_finder_full_update' + '.json'
-# global_full_update_path = folder_url + "/" + "Global_Full_Update"
-# check_glbl_full_updte = global_full_update_path+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
-
-# '''Append Published Data to Json'''
-
-# logger.info(
-# f'Training Data Append Start::{vehicle}-{language}-{publish_type}')
-# '''update existing global json file'''
-# if os.path.isfile(global_file_path) and publish_type == 'Internal':
-# if os.path.isfile(check_glbl_full_updte):
-# with open(check_glbl_full_updte) as f:
-# published_data = json.load(f)
-# for i in parent:
-# training_information = training_information_data(vehicle, language,
-# publish_type, i.parent1)
-# if training_information['status'] == 1:
-# publish_training_information = 1
-# training_information_tmp.append(
-# training_information['data'][0])
-# training_information_tmp = create_df(training_information_tmp)
-# latest_pub_data['data'] = training_information_tmp
-# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
-# 'data': training_information_tmp})
-# if vehi_data:
-# find_distinct = set(exisitng_var)
-# new_variant_name = [
-# x for x in new_variant if x not in find_distinct]
-# vehi_data = add_new_val(
-# vehi_data, new_variant_name,training_information_tmp)
-
-# elif os.path.isfile(file_path) and publish_type == 'Global':
-# frappe.log_error('inside')
-# full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
-# #Remove Internal Full Update File On Global Publish
-# existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
-# frappe.log_error('file_path' + str(existing_internal_full_update))
-# if os.path.isfile(existing_internal_full_update):
-# os.remove(existing_internal_full_update)
-# with open(check_glbl_full_updte) as f:
-# published_data = json.load(f)
-# for i in parent:
-# training_information = training_information_data(vehicle, language,
-# publish_type, i.parent1)
-# if training_information['status'] == 1:
-# publish_training_information = 1
-# training_information_tmp.append(
-# training_information['data'][0])
-# # training_information_tmp = create_df(training_information_tmp)
-# latest_pub_data['data'] = training_information_tmp
-# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
-# 'data': training_information_tmp})
-# if vehi_data:
-# find_distinct = set(exisitng_var)
-# new_variant_name = [
-# x for x in new_variant if x not in find_distinct]
-# vehi_data = add_new_val(
-# vehi_data,new_variant_name, training_information_tmp)
-# else:
-# #Existing Global or Internal Full Update
-
-# internal_full_update_check = full_update_folder_url = folder_url + "/" +"Internal_Full_Update"
-# check_full_update_file = internal_full_update_check+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
-# if os.path.isfile(check_full_update_file):
-# with open(check_full_update_file) as f:
-# published_data = json.load(f)
-# for i in parent:
-# frappe.log_error("i",str(parent))
-# training_information = training_information_data(
-# vehicle, language, publish_type, i.parent1)
-# if training_information['status'] == 1:
-# publish_training_information = 1
-
-# if len(training_information['data']):
-# training_information_tmp.append(
-# training_information['data'][0])
-# else:
-# frappe.throw('failed to publish')
-# # training_information_tmp = create_df(training_information_tmp)
-# latest_pub_data['data'] = training_information_tmp
-
-# vehi_data = compare_get_data({'data': latest_pub_data['data']}, {
-# 'data': training_information_tmp})
-# if vehi_data:
-# find_distinct = set(exisitng_var)
-# new_variant_name = [
-# x for x in new_variant if x not in find_distinct]
-# vehi_data = add_new_val(
-# vehi_data, new_variant_name,training_information_tmp)
-
-# vehicle_data['data'] = vehi_data
-
-# #First Time Global Or Internal
-# else:
-# for i in parent:
-# training_information = training_information_data(
-# vehicle, language, publish_type, i.parent1)
-
-# if training_information['status'] == 1:
-# publish_training_information = 1
-# if len(training_information['data'])>0:
-# training_information_tmp.append(
-# training_information['data'][0])
-# else:
-# frappe.throw('failed to publish')
-# training_information_tmp = get_latest_data(
-# {'data': training_information_tmp}, {'data': []})
-# frappe.log_error(str(training_information_tmp))
-# # vehi_data = create_df(training_information_tmp)
-# latest_pub_data['data'] = training_information_tmp
-
-# if publish_type == "Global":
-# full_update_folder_url_internal = folder_url + "/" + "Internal_Full_Update"
-# #Remove Internal Full Update File On Global Publish
-# existing_internal_full_update = full_update_folder_url_internal+"/" + vehicle.replace(' ', '-') + '-training_information_full_update' + '.json'
-# frappe.log_error('file_path' + str(existing_internal_full_update))
-# if os.path.isfile(existing_internal_full_update):
-# os.remove(existing_internal_full_update)
-# """ Save publish file """
-# vehicle_data['data'] = training_information_tmp
-
-# with open(file_path, 'w') as outfile:
-# outfile.write(json.dumps(latest_pub_data, indent=4, default=str))
-
-
-# with open(full_update_file, 'w') as outfile:
-# outfile.write(json.dumps(vehicle_data, indent=4, default=str))
-
-
-
-# logger.info(
-# f'Training Information Data Append Start::{vehicle}-{language}-{publish_type}')
-
-# return 1, file_path
-
-# except Exception as e:
-# logger.info(
-# f'{vehicle} - {language} - {publish_type} - {module_name} error in json creation' + str(e))
-
-# frappe.throw('Failed To Publish')
-
-
def training_information_publish(vehicle, vehicle_id,
- language, publish_type, release_description,
- variant, parent):
+ language, publish_type, release_description,
+ variant, parent):
try:
logger_file = f'{vehicle} - {language} - {publish_type} - training information'
logger = frappe.logger(logger_file, allow_site=True, file_count=100)
@@ -1720,46 +1609,52 @@ def training_information_publish(vehicle, vehicle_id,
published_data = json.load(f)
for i in parent:
training_information = training_information_data(vehicle, language,
- publish_type, i.parent1)
+ publish_type, i.parent1)
+
if training_information['status'] == 1:
publish_training_information = 1
training_information_tmp.append(
training_information['data'][0])
+ else:
+ frappe.throw(str('failed to publish'))
training_information_tmp = get_latest_data({'data': published_data['data']}, {
- 'data': training_information_tmp})
+ 'data': training_information_tmp})
elif os.path.isfile(global_file_path) and publish_type == 'Global':
with open(global_file_path) as f:
published_data = json.load(f)
for i in parent:
training_information = training_information_data(vehicle, language,
- publish_type, i.parent1)
+ publish_type, i.parent1)
if training_information['status'] == 1:
publish_training_information = 1
training_information_tmp.append(
training_information['data'][0])
+ else:
+ frappe.throw(str('failed to publish'))
training_information_tmp = get_latest_data({'data': published_data['data']}, {
- 'data': training_information_tmp})
+ 'data': training_information_tmp})
if os.path.isfile(check_inter_file):
- frappe.log_error(str('internal aleady exists'))
with open(check_inter_file) as f:
published_data = json.load(f)
for i in parent:
training_information = training_information_data(vehicle, language,
- publish_type, i.parent1)
+ publish_type, i.parent1)
if training_information['status'] == 1:
publish_training_information = 1
training_information_tmp1.append(
training_information['data'][0])
- frappe.log_error('before' + str(training_information_tmp1))
+ else:
+ frappe.throw(str('failed to publish'))
+
training_information_tmp = get_latest_data({'data': published_data['data']}, {
- 'data': training_information_tmp1})
-
+ 'data': training_information_tmp1})
+
else:
- frappe.log_error(str('new internal publish'))
+ # frappe.log_error(str('new internal publish'))
for i in parent:
training_information = training_information_data(
vehicle, language, publish_type, i.parent1)
@@ -1772,19 +1667,22 @@ def training_information_publish(vehicle, vehicle_id,
# frappe.msgprint(str(repair_checksheet['data'][0]))
training_information_tmp = get_latest_data(
{'data': training_information_tmp}, {'data': []})
- frappe.log_error('new pub data' +str(training_information_tmp))
+ # frappe.log_error('new pub data' + str(training_information_tmp))
if publish_type == "Global":
- #Remove Internal File On Global Publish
- internal_check = folder_url + "/" + 'Internal' + "/" + \
- vehicle.replace(' ', '-') + '-training_information' + '.json'
+ # Remove Internal File On Global Publish
+ internal_check = folder_url + "/" + 'Internal' + "/" + \
+ vehicle.replace(' ', '-') + '-training_information' + '.json'
if os.path.isfile(internal_check):
os.remove(internal_check)
if publish_training_information == 1:
""" Save publish file """
vehicle_data['data'] = training_information_tmp
- frappe.log_error(str(file_path))
+ # frappe.log_error(str(file_path))
+
+ with open(file_path, 'w') as outfile:
+ outfile.write(json.dumps(vehicle_data, indent=4, default=str))
with open(file_path, 'w') as outfile:
outfile.write(json.dumps(vehicle_data, indent=4, default=str))
@@ -1796,6 +1694,7 @@ def training_information_publish(vehicle, vehicle_id,
f'{vehicle} - {language} - {publish_type} error in json creation' + str(e))
frappe.throw('Failed To Publish')
+
def get_key_value_data(data):
module_dic = {}
for d in data['data']:
@@ -2127,20 +2026,21 @@ def special_tool_data(vehicle=None,
logger.error('error in special tool' + str(e))
return failure_reponse
+
def torque_manual_data(vehicle=None, language=None,
- publish_type=None, parent=None, variant=None):
+ publish_type=None, parent=None, variant=None):
try:
- logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant} - fetch data'
+ logger_file = f'{vehicle} - {language} - {publish_type} - {module_name} - {variant} - torque fetch data'
logger = frappe.logger(logger_file,
allow_site=True, file_count=100)
logger.info(
f"start of fetching torque manual data - {vehicle} - {language} - {variant} - {module_name}")
torque_manual_details = frappe.db.sql('''select name,variant,vehicle,language,active_status,category,sub_category,display_order,keywords,my_id
- from `tabTorque Manual` where variant ='%s' && language= '%s';''' % (variant,language), as_dict=1)
+ from `tabTorque Manual` where variant ='%s' && language= '%s';''' % (variant, language), as_dict=1)
for t in torque_manual_details:
- t['torque_table']= frappe.db.sql(''' select idx as 'display_order',fastener_description,specification,qty,torque_nm,remark from
- `tabTorque Manual Details` where parent = '%s' order by display_order;''' % (t['name']),as_dict=1)
-
+ t['torque_table'] = frappe.db.sql('''select name,col1,col2,col3,col4,col5,col6,col7,col8,col9,col10,idx as display_order from
+ `tabTorque Manual Details` where parent = '%s' order by idx;''' % (t['name']), as_dict=1)
+
logger.info(
f"end of Torque Manual data {variant} - {language}")
success_reponse['data'] = torque_manual_details
@@ -2153,10 +2053,10 @@ def torque_manual_data(vehicle=None, language=None,
e)
logger.error('error in Torque Manual' + str(e))
return failure_reponse
-
+
def training_information_data(vehicle=None, language=None,
- publish_type=None, parent=None,variant=None):
+ publish_type=None, parent=None, variant=None):
try:
logger_file = f'{vehicle} - {language} - {publish_type} - training_information_data'
logger = frappe.logger(logger_file,
@@ -2169,14 +2069,15 @@ def training_information_data(vehicle=None, language=None,
and language = '{language}'and name='{parent}' """, as_dict=1)
for d in training_information_details:
- d['Content'] = frappe.db.sql(f"""select idx as 'display_order',topic,content from `tabTraining Information Content Reference`
- where parent = '{d['name']}'
- order by display_order ;""", as_dict=1)
+ d['content'] = frappe.db.sql(f"""select idx as 'display_order',topic as file_type,case when topic != 'SCROM' then file_url else scrom_file_url end as file_url,
+ file_name from `tabTraining Information Content Reference` where parent = '{d['name']}'
+ order by display_order ;""", as_dict=1)
logger.info(
f"end of fetching training informataion data {vehicle} - {language}")
success_reponse['data'] = training_information_details
- success_reponse['message'] = f'Training Information Fecthed Succesfully for {vehicle} - {language} - training_information_data '
+ success_reponse[
+ 'message'] = f'Training Information Fecthed Succesfully for {vehicle} - {language} - training_information_data '
return success_reponse
except Exception as e:
@@ -2225,6 +2126,7 @@ def cal_ver_new_module(vehicle, lang, publish_type, doc=None):
except Exception as e:
return {"status": 0, "data": "None", "error": str(e)}
+
@frappe.whitelist()
def get_service_repair(vehicle,
language_label, publish_type):
@@ -2233,6 +2135,7 @@ def get_service_repair(vehicle,
''', as_dict=1)
return data
+
@frappe.whitelist()
def get_feature_finder(vehicle=None, variant=None, language_label=None):
try:
@@ -2242,14 +2145,17 @@ def get_feature_finder(vehicle=None, variant=None, language_label=None):
except Exception as e:
return str(e)
+
@frappe.whitelist()
-def get_training_information(vehicle=None,language_label=None):
+def get_training_information(vehicle=None, language_label=None):
try:
data = frappe.db.sql('''
- select * from `tabTraining Information` where vehicle = '%s' and language='%s' and is_published = '%s';''' % (vehicle,language_label,0), as_dict=1)
+ select * from `tabTraining Information` where vehicle = '%s' and language='%s' and is_published = '%s';''' % (vehicle, language_label, 0), as_dict=1)
return data
except Exception as e:
return str(e)
+
+
@frappe.whitelist()
def get_qwik_service(variant=None, language_label=None):
try:
@@ -2272,7 +2178,35 @@ def get_special_tool(vehicle=None):
def get_torque_manual(variant=None, language_label=None):
try:
data = frappe.db.sql('''
- select * from `tabTorque Manual` where variant='%s' and language='%s' and is_published='%s';''' % (variant,language_label,0), as_dict=1)
+ select * from `tabTorque Manual` where variant='%s' and language='%s' and is_published='%s';''' % (variant, language_label, 0), as_dict=1)
return data
except Exception as e:
- return str(e)
\ No newline at end of file
+ return str(e)
+
+
+def extract_scrom_package(data, vehicle):
+ try:
+ base_path = BASE_URL_PUBLIC + "/" + "training" + "/" + vehicle
+ scrom_path = base_path + "/" + "scrom"
+
+ for d in data:
+ if d['content']:
+ for ch in d['content']:
+ if ch['topic'] == 'SCROM' and ch['content'] != None:
+ scrom_folder_path = base_path + "/" + "scrom" + \
+ "/" + ch['content'].split("/")[2].split(".")[0]
+ if not os.path.exists(scrom_folder_path):
+ os.makedirs(scrom_folder_path)
+ if ch['content']:
+
+ with zipfile.ZipFile(BASE_URL_PUBLIC+ch['content'], 'r') as zip_ref:
+ zip_ref.extractall(path=scrom_folder_path)
+ ch['base_path'] = scrom_folder_path + \
+ ch['content'].split("/")[2].split(".")[0]
+ ch['file_url'] = ch['base_path']+"/"+ch['file_name']
+ update_base_path = frappe.db.sql(
+ f"""update `tabTraining Information Content Reference` set base_path = '{ch['base_path']}' and file_url ='{ch['file_url']}' where name='{ch['content']}'""")
+ return {"success": 1, "data": data}
+
+ except Exception as e:
+ return "Extarction Failed"+str(e)