update python api v2
This commit is contained in:
parent
54b1aa42ef
commit
168c85dfc3
1 changed files with 436 additions and 40 deletions
|
|
@ -37,6 +37,14 @@ holdonPD = ""
|
|||
global isJson
|
||||
isJson = False
|
||||
|
||||
|
||||
# Keys to be removed
|
||||
removed_keylist = [
|
||||
"RemainingCups",
|
||||
"RemainingCupsWithTopping"
|
||||
]
|
||||
|
||||
|
||||
# Set value of nested map (target - master_json) by using keys from 'key_list' and assigned value to it.
|
||||
def set_value_in_nested_map(key_list: list, value):
    """Assign *value* inside the global ``master_json`` at the nested
    location addressed by *key_list*.

    All keys except the last are used to walk down the structure; the
    final key is the slot that receives *value*.  Mutates ``master_json``
    in place and returns None.
    """
    node = master_json
    for step in key_list[:-1]:
        node = node[step]
    node[key_list[-1]] = value
|
||||
|
|
@ -49,8 +57,15 @@ def get_value_in_nested_map(target_json: dict, key_list: list, isMaster=False):
|
|||
if "SubMenu" in key_list:
|
||||
if len(reduce(getitem, key_list[:3], target_json)) <= 0:
|
||||
return "-"
|
||||
|
||||
return reduce(getitem, key_list[:-1], target_json)[key_list[-1]]
|
||||
|
||||
# Handle no key case
|
||||
result = None
|
||||
try:
|
||||
result = reduce(getitem, key_list[:-1], target_json)[key_list[-1]]
|
||||
except:
|
||||
print(key_list, "->",result)
|
||||
pass
|
||||
return result
|
||||
|
||||
# Decode key that was in form of String to the list
|
||||
def decode_path(str_with_dot: str) -> list:
|
||||
|
|
@ -66,7 +81,7 @@ def decode_path(str_with_dot: str) -> list:
|
|||
|
||||
def fetch_pd(str_path: str, target_dict: dict) -> str:
    """Resolve the product identifier that lives under *str_path* in
    *target_dict*.

    Recipe entries ("Recipe01" paths) are keyed by "productCode"; every
    other section is keyed by "id".  The dotted *str_path* is decoded to
    a key list, the identifier key is appended, and the value is looked
    up via get_value_in_nested_map.
    """
    path_keys = decode_path(str_with_dot=str_path)
    id_field = "productCode" if "Recipe01" in str_path else "id"
    path_keys.append(id_field)
    return get_value_in_nested_map(target_json=target_dict, key_list=path_keys)
|
||||
|
||||
|
|
@ -95,6 +110,261 @@ def create_map(events_action: str, log: str, additional=[]) -> str:
|
|||
"addition": additional
|
||||
}
|
||||
|
||||
|
||||
def flatten(x, parent='', sep='.') -> dict:
    """Collapse a nested dict (containing dicts/lists/scalars) into a
    single-level dict whose keys are *sep*-joined paths.

    List elements are addressed by their index, e.g.
    ``{"a": [{"b": 1}]}`` becomes ``{"a.0.b": 1}``.  Empty nested dicts
    and empty lists produce no entries (their key disappears).
    """
    out = {}
    for name, node in x.items():
        prefix = f"{parent}{sep}{name}" if parent else name
        if isinstance(node, dict):
            out.update(flatten(node, prefix, sep))
        elif isinstance(node, list):
            for idx, element in enumerate(node):
                child = f"{prefix}{sep}{idx}"
                if isinstance(element, dict):
                    out.update(flatten(element, child, sep))
                else:
                    out[child] = element
        else:
            out[prefix] = node
    return out
|
||||
|
||||
def unflatten(dictionary):
    """Rebuild the nested dict/list structure from a flatten()-style
    mapping of dotted paths to values (inverse of flatten()).

    Numeric path components denote list indices, everything else dict
    keys.  Missing list slots created on the way are padded with None.

    Bug fix: the previous version decided whether to create a list or a
    dict from the *current* component and the *current* container, so any
    list nested under a dict key (e.g. ``{"a.0": 1}``) either crashed
    (``dict`` has no ``append``) or produced ``{0: []}`` dicts.  The
    container type must be chosen by looking at the *next* component:
    a digit next component means the child is a list, otherwise a dict.
    """
    result = {}
    for flat_key, value in dictionary.items():
        # Pre-convert digit components to ints once.
        parts = [int(p) if p.isdigit() else p for p in flat_key.split('.')]
        node = result
        for i, part in enumerate(parts[:-1]):
            # Lookahead: the next component decides the child container.
            child_default = [] if isinstance(parts[i + 1], int) else {}
            if isinstance(node, list):
                # Pad the list so index `part` exists.
                while len(node) <= part:
                    node.append(None)
                if node[part] is None:
                    node[part] = child_default
            else:
                if part not in node:
                    node[part] = child_default
            node = node[part]
        last = parts[-1]
        if isinstance(node, list):
            while len(node) <= last:
                node.append(None)
        node[last] = value
    return result
|
||||
|
||||
|
||||
def diff(dict1, dict2):
    """Three-way comparison of two flat dicts.

    Returns a tuple ``(only_in_first, only_in_second, changed)`` where
    *changed* maps each shared key with unequal values to the pair
    ``(dict1[key], dict2[key])``.
    """
    keys1, keys2 = set(dict1), set(dict2)

    only_first = {k: dict1[k] for k in keys1 - keys2}
    only_second = {k: dict2[k] for k in keys2 - keys1}
    changed = {
        k: (dict1[k], dict2[k])
        for k in keys1 & keys2
        if dict1[k] != dict2[k]
    }

    return only_first, only_second, changed
|
||||
|
||||
def get_key(d, target) -> str:
    """Reverse lookup: return the first key in *d* whose value equals
    *target*, or None when no value matches.
    """
    return next((k for k, v in d.items() if v == target), None)
|
||||
|
||||
def remove_key(d, key):
    """Strip *key* from a nested structure, mutating it in place.

    Dicts: *key* is deleted at every dict level, recursing into the
    remaining values.  Lists: only each dict element's top level is
    cleaned, plus one extra level inside an element's "SubMenu" list —
    other nested values of list elements are deliberately left
    untouched (matches how recipe entries are shaped).
    """
    if isinstance(d, dict):
        for existing in list(d.keys()):
            if existing == key:
                del d[existing]
            else:
                remove_key(d[existing], key)
    elif isinstance(d, list):
        for element in d:
            if not isinstance(element, dict):
                continue
            element.pop(key, None)
            if "SubMenu" in element:
                for sub_entry in element["SubMenu"]:
                    if key in list(sub_entry.keys()):
                        del sub_entry[key]
|
||||
|
||||
def string_to_date(str_date: str):
    """Parse a "DD-Mon-YYYY[,] HH:MM:SS" timestamp string and return
    only its time-of-day component.

    A comma in the input selects the "%d-%b-%Y, %H:%M:%S" format,
    otherwise "%d-%b-%Y %H:%M:%S" is used.  NOTE: despite the name, the
    return value is a ``datetime.time``, not a date.
    """
    fmt = "%d-%b-%Y, %H:%M:%S" if "," in str_date else "%d-%b-%Y %H:%M:%S"
    return datetime.datetime.strptime(str_date, fmt).time()
|
||||
|
||||
def CompareDate(master_lastchange, dev_lastchange) -> str:
    """Classify the dev-side timestamp relative to master's.

    Returns "DevLegacy" when master is newer, "NoChange" when they are
    equal, and "DevRelease" when the dev side is newer.
    """
    if master_lastchange == dev_lastchange:
        return "NoChange"
    return "DevLegacy" if master_lastchange > dev_lastchange else "DevRelease"
|
||||
|
||||
def GetMasterDevCompare(key_list: list, dev_date: any) -> bool:
    """Return True when the dev-side timestamp is strictly newer than
    the corresponding timestamp stored in the global ``master_json``.

    *key_list* is the dotted-path string addressing the master
    timestamp; *dev_date* is the dev-side timestamp string.
    """
    master_raw = get_value_in_nested_map(master_json, decode_path(key_list), True)
    verdict = CompareDate(string_to_date(master_raw), string_to_date(dev_date))
    # "DevLegacy" and "NoChange" both mean: keep master, nothing newer.
    return verdict == "DevRelease"
|
||||
|
||||
# ------------------------------------ HMTL --------------------------------------
|
||||
|
||||
def blinking_text():
    """Return a <style> block defining the `.blink` CSS class.

    The animation steps opacity from 100% down to 0% every .25s,
    alternating direction, so elements tagged class="blink" pulse.
    Both standard and -webkit- prefixed keyframes are emitted for
    older WebKit browsers.  The markup is embedded verbatim in the
    HTML log file; whitespace inside it is cosmetic only.
    """
    return """
    <style>
    .blink {
        animation: blink-animation .25s linear infinite alternate;
        -webkit-animation: blink-animation .25s linear infinite alternate;
    }
    @keyframes blink-animation {
        0% {
            opacity: 100%
        }
        25% {
            opacity: 75%
        }
        50% {
            opacity: 50%
        }
        75% {
            opacity: 25%
        }
        100% {
            opacity: 0%
        }
    }
    @-webkit-keyframes blink-animation {
        0% {
            opacity: 100%
        }
        25% {
            opacity: 75%
        }
        50% {
            opacity: 50%
        }
        75% {
            opacity: 25%
        }
        100% {
            opacity: 0%
        }
    }
    </style>
    """
|
||||
|
||||
def p(content:str, isAction: bool) -> str:
    """Wrap *content* in an inline-styled <p> element for the HTML log.

    Substrings found in *content* select the decoration:
    - isAction=True: "CHANGE" -> greenyellow, "INSERT" -> orange.
    - isAction=False: "VER" -> aliceblue; "COUNT"/"OUTPUT"/"LOG" ->
      aquamarine; "master"/"dev" -> shadowed gray/blue pair;
      "New key"/"New value" -> rounded, blinking green/gold.
    Returns the complete "\\t\\t<p ...>content</p>\\n" markup string.
    """
    pclass = ""
    # Opening of the inline style attribute; closed below with '\">'.
    style = "style=\"float: left; margin: 15; padding: 10px; "

    # style
    if isAction:

        if "CHANGE" in content:

            style += """
            background: greenyellow;
            """
        elif "INSERT" in content:
            style += """
            background: orange;
            """
    else:
        if "VER" in content:
            style += """
            background: aliceblue;
            """
        elif "COUNT" in content or "OUTPUT" in content or "LOG" in content:
            style += """
            background: aquamarine;
            """
        elif "master" in content or "dev" in content:
            style += """
            box-shadow: 2px 2px;
            """
            # Negative right margin pulls the master cell against the
            # dev cell so the pair reads as one comparison.
            if "master" in content:
                style += "background: lightgray; margin-right: -20px;"
            else:
                style += "background: lightblue;"
        elif "New key" in content or "New value" in content:
            style += """
            border-radius: 25px;
            """
            # Only new-key/new-value cells get the blinking CSS class.
            pclass = "class=\"blink\" "
            if "New key" in content:
                style += "background: lightgreen;"
            else:
                style += "background: gold;"


    # Terminate the style attribute and the opening tag.
    style += "\">"

    return "\t\t<p "+pclass+style+content+"</p>\n"
|
||||
|
||||
|
||||
def searchFnForHtml():
    """Return a <script> block implementing searchLog().

    The JS filters the HTML log in place: for every <div> row inside
    #logBody it shows the row if any of its <p> children contains the
    text typed into #searchInput, otherwise hides it.  Paired with
    searchInputForHTML(), which emits the input element.
    """
    return """
    <script>
    function searchLog(){
        var input, filter, logBody, logRow, logP, logContent;
        input = document.getElementById("searchInput");
        filter = input.value;
        logBody = document.getElementById("logBody");
        logRow = logBody.getElementsByTagName("div");

        for(let i = 0; i < logRow.length; i++){
            p = logRow[i].getElementsByTagName("p");
            console.log("Total P = ", logRow[i].getElementsByTagName("p").length);

            for(let p_i = 0; p_i < p.length; p_i++){
                pcontent = p[p_i].textContent || p[p_i].innerText;

                if(pcontent.includes(filter)){
                    logRow[i].style.display = "";
                    break;
                } else {
                    logRow[i].style.display = "none";
                }
            }
        }
    }
    </script>
    """
|
||||
|
||||
def searchInputForHTML():
    """Return the fixed-position search <input> for the HTML log page.

    Every keystroke triggers searchLog() (defined by searchFnForHtml())
    to filter the visible log rows.
    """
    return """
    <input type="text" id="searchInput" onkeyup="searchLog()" placeholder="Search logs ... " style="top: 0; position: fixed; width: 100%; font-size: 16px; padding: 20px; margin: 10px; border: 1px solid">
    """
|
||||
|
||||
def mergeV3pre(flat_dict_base:dict, flat_dict_dev:dict) -> dict:
    """
    `version 3 preview`

    Merge two flattened dictionaries into a new dict; neither input is
    mutated.

    `Warning` Keys present in both inputs take the dev-side value
    (existing values are overwritten).
    """
    return {**flat_dict_base, **flat_dict_dev}
|
||||
|
||||
|
||||
# Merge diff value or append it to the main file.
|
||||
# 0 - master_path = master.json / to be merged
|
||||
# 1 - dev_path = dev.json / to merge into another
|
||||
|
|
@ -103,6 +373,8 @@ def create_map(events_action: str, log: str, additional=[]) -> str:
|
|||
# 4 - debug = enable debug
|
||||
def merge(args):
|
||||
"""
|
||||
`version 2`
|
||||
|
||||
Merge the 2 json files together with input of target paths, output path and changefile
|
||||
|
||||
:param args: A list of arguments. list[]
|
||||
|
|
@ -144,12 +416,15 @@ def merge(args):
|
|||
|
||||
#
|
||||
if (os.path.exists(master_path) == False) and str(master_path).isdigit():
|
||||
master_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(master_path) + ".json"
|
||||
if "/coffeethai02" not in master_path:
|
||||
master_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(master_path) + ".json"
|
||||
|
||||
|
||||
master_file = open(master_path, 'rb')
|
||||
|
||||
if (os.path.exists(dev_path) == False) and str(dev_path).isdigit():
|
||||
dev_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(dev_path) + ".json"
|
||||
if "/coffeethai02" not in dev_path:
|
||||
dev_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(dev_path) + ".json"
|
||||
|
||||
dev_file = open(dev_path, 'rb')
|
||||
|
||||
|
|
@ -186,6 +461,7 @@ def merge(args):
|
|||
dev_json = dev_json_file.copy()
|
||||
|
||||
config_ver = master_json["MachineSetting"]["configNumber"]
|
||||
print(config_ver)
|
||||
|
||||
global pdchange
|
||||
pdchange = 0
|
||||
|
|
@ -193,8 +469,36 @@ def merge(args):
|
|||
pdadd = 0
|
||||
# global holdonPD
|
||||
holdonPD = ""
|
||||
|
||||
# Step 1: Flatten the dictionary
|
||||
flattened_master = flatten(master_json)
|
||||
flattened_dev = flatten(dev_json)
|
||||
|
||||
# Step 2: Check key size of flattens
|
||||
try:
|
||||
if debug == "debug":
|
||||
print("master keys = ", len(flattened_master.keys()), ", dev keys = ", len(flattened_dev.keys()))
|
||||
except:
|
||||
pass
|
||||
|
||||
# Step 3: Diff
|
||||
diff1, diff2, diffvals = diff(flattened_master, flattened_dev)
|
||||
# print("Exist in master = ", len(diff1))
|
||||
# print("Exist in dev = ", len(diff2))
|
||||
# print("Unmatched values = ", len(diffvals))
|
||||
|
||||
# m_pre = mergeV3pre(flat_dict_base=flattened_master, flat_dict_dev=flattened_dev)
|
||||
# final_m = unflatten(m_pre)
|
||||
|
||||
# with open("./output/mergev3.json", "w+", encoding="utf-8") as testfile:
|
||||
# json.dump(final_m, testfile, indent=2, ensure_ascii=False)
|
||||
|
||||
# Clean unused key
|
||||
for i, remove_item in enumerate(removed_keylist):
|
||||
remove_key(master_json, remove_item)
|
||||
|
||||
merge_dicts(master_json, dev_json_file)
|
||||
# print(master_json["MachineSetting"]["configNumber"])
|
||||
|
||||
try:
|
||||
if debug == "debug":
|
||||
print("/".join(changefile_path.split("/")[:-1]))
|
||||
|
|
@ -206,34 +510,44 @@ def merge(args):
|
|||
if (os.path.exists("/".join(changefile_path.split("/")[:-1])) == False):
|
||||
os.makedirs("/".join(changefile_path.split("/")[:-1]))
|
||||
|
||||
with open(outfile_path, "w+", encoding="utf-8") as outfile:
|
||||
with open(outfile_path, "w", encoding="utf-8") as outfile:
|
||||
json.dump(master_json, outfile, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
# Create new filename by outfile_path
|
||||
outfile_path, outfile_ext = os.path.splitext(outfile_path)
|
||||
# ignore ext, get the last
|
||||
outfile_path_spl = str(outfile_path).split("/")
|
||||
path_version = outfile_path_spl[len(outfile_path_spl) - 1]
|
||||
|
||||
pv = path_version.split("_")[1]
|
||||
|
||||
|
||||
# Include counts
|
||||
|
||||
events_json.append(create_map(events_action="COUNT", log="Total="+str(pdchange)))
|
||||
events_json.append(create_map(events_action="COUNT", log="Total="+str(pdadd)))
|
||||
events_json.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path))
|
||||
events_json.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
|
||||
events_json.append(create_map(events_action="LOG", log="Log is saved to "+file_path+"_"+pv+".json"))
|
||||
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
|
||||
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
|
||||
|
||||
events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
|
||||
events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
|
||||
events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+file_path+"_"+pv+".json"+"\n")
|
||||
|
||||
# log json file
|
||||
|
||||
if not os.path.exists(file_path+".json") or os.stat(file_path+".json").st_size == 0:
|
||||
with open(file_path+".json", "w+",encoding="utf-8") as outlogfile:
|
||||
if not os.path.exists(file_path+"_"+pv+".json") or os.stat(file_path+"_"+pv+".json").st_size == 0:
|
||||
with open(file_path+"_"+pv+".json", "w",encoding="utf-8") as outlogfile:
|
||||
json.dump({"logs"+GetDateTimeString()+"*": events_json}, outlogfile, indent=2, ensure_ascii=False)
|
||||
else:
|
||||
print(file_path+".json")
|
||||
logjs:dict = json.loads(open(file_path+".json", encoding="utf-8").read())
|
||||
print(file_path+"_"+pv+".json")
|
||||
logjs:dict = json.loads(open(file_path+"_"+pv+".json", encoding="utf-8").read())
|
||||
logjs["logs"+GetDateTimeString()+"*"] = events_json
|
||||
json.dump(logjs, open(file_path+".json", "w+", encoding="utf-8"), indent=2, ensure_ascii=False)
|
||||
json.dump(logjs, open(file_path+"_"+pv+".json", "w+", encoding="utf-8"), indent=2, ensure_ascii=False)
|
||||
|
||||
# log file
|
||||
with open(file_path+".log", "a+") as outlogfile2:
|
||||
with open(file_path+"_"+pv+".log", "a+") as outlogfile2:
|
||||
try:
|
||||
|
||||
for event in events:
|
||||
|
|
@ -242,34 +556,58 @@ def merge(args):
|
|||
raise Exception(event)
|
||||
|
||||
# Create html version
|
||||
with open(file_path+".html", "a+") as outlogHtml:
|
||||
|
||||
# add version of master to before extension
|
||||
|
||||
html_string = ""
|
||||
if os.path.exists(file_path+"_"+pv+".html"):
|
||||
html_string = ""
|
||||
else:
|
||||
html_string = blinking_text()+searchFnForHtml()+"\n"+searchInputForHTML()+"\n"
|
||||
|
||||
with open(file_path+"_"+pv+".html", "a+") as outlogHtml:
|
||||
html_string += "<div id=\"logBody\" style=\"display: flex; flex-direction: column; margin-top: 100\">\n"
|
||||
for event in events:
|
||||
# Create div
|
||||
# print("Log as list: ",str(event).split("\t"))
|
||||
html_string = "\t<div class=\"flex p-4 bg-stone-300 rounded m-2\">\n"
|
||||
html_string += "\t<div id=\"logRow\" style=\"display: inline-block; "
|
||||
if "REQUEST" in event:
|
||||
html_string += "background: burlywoord\">\n"
|
||||
elif "INSERT" in event:
|
||||
html_string += "background: darkkhaki\">\n"
|
||||
elif "CHANGE" in event:
|
||||
html_string += "background: beige\">\n"
|
||||
else:
|
||||
html_string += "background: antiquewhite\">\n"
|
||||
event_fraction = str(event).split("\t")
|
||||
for i in event_fraction:
|
||||
if i != "" and i != "\n" and i != "---":
|
||||
if "|" in i and not i.endswith("|"):
|
||||
# CHANGE
|
||||
spl_text = i.split("|")
|
||||
html_string += "\t\t<p>"+spl_text[0]+"</p>\n"
|
||||
html_string += "\t\t<p>"+spl_text[1].replace("\n","")+"</p>\n"
|
||||
html_string += p(spl_text[0], False)
|
||||
html_string += p(spl_text[1].replace("\n",""), False)
|
||||
elif ">>>" in i:
|
||||
# INSERT
|
||||
spl_text = i.split(">>>")
|
||||
html_string += "\t\t<p>"+spl_text[0]+"</p>\n"
|
||||
html_string += "\t\t<p>"+spl_text[1].replace("\n","")+"</p>\n"
|
||||
html_string += p(spl_text[0], False)
|
||||
html_string += p(spl_text[1].replace("\n",""), False)
|
||||
elif i.endswith("|"):
|
||||
html_string += "\t\t<p>"+i[:-1]+"</p>\n"
|
||||
|
||||
# Last change
|
||||
html_string += p(i[:-1], False)
|
||||
else:
|
||||
# print("Default = ", i)
|
||||
# Either version, status or others
|
||||
|
||||
html_string += "\t\t<p>"+i.replace("\n","")+"</p>\n"
|
||||
html_string += "\t</div>\n"
|
||||
|
||||
outlogHtml.write(html_string)
|
||||
if "CHANGE" in i or "INSERT" in i:
|
||||
html_string += p(i.replace("\n",""), True)
|
||||
else:
|
||||
html_string += p(i.replace("\n",""), False)
|
||||
|
||||
|
||||
html_string += "\t</div>\n"
|
||||
html_string += "</div>\n"
|
||||
outlogHtml.write(html_string)
|
||||
|
||||
|
||||
|
||||
|
|
@ -299,12 +637,13 @@ def merge_dicts(original:dict, updated:dict, path=""):
|
|||
last_change = value
|
||||
last_change_path = current_path
|
||||
# print("[LastChange] LastChange: ",last_change_path, " value = ",value)
|
||||
|
||||
if isinstance(value, dict) and isinstance(original[key], dict):
|
||||
merge_dicts(original[key], value, current_path)
|
||||
elif isinstance(value, list) and isinstance(original[key], list):
|
||||
merge_lists(original[key], value, current_path)
|
||||
else:
|
||||
# Detect diff
|
||||
# Detect diff and "LastChange must diff"
|
||||
if original[key] != value:
|
||||
if key == "configNumber":
|
||||
# use master version
|
||||
|
|
@ -322,7 +661,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
|
|||
create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value)))
|
||||
|
||||
events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
|
||||
else:
|
||||
elif last_change_path != "" and GetMasterDevCompare(last_change_path, last_change):
|
||||
lc = last_change if last_change != "" else pre_timestamp
|
||||
try:
|
||||
if debug == "debug":
|
||||
|
|
@ -344,29 +683,66 @@ def merge_dicts(original:dict, updated:dict, path=""):
|
|||
"master_last_change": (get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True)),
|
||||
"dev_last_change": last_change,
|
||||
"pd": holdonPD,
|
||||
"name": original["name"],
|
||||
"name": original["name"] if "name" in original else "",
|
||||
"fullpath": current_path
|
||||
}
|
||||
]
|
||||
))
|
||||
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\"\n")
|
||||
# elif "MaterialSetting" in current_path:
|
||||
# events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n")
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t[CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\"\n")
|
||||
# elif "Recipe01" not in current_path:
|
||||
# # reset holded pd
|
||||
# holdonPD = None
|
||||
# override original value by value from updated(dev)
|
||||
# if GetMasterDevCompare(last_change_path, last_change):
|
||||
original[key] = value
|
||||
elif "Recipe01" not in current_path and "Topping" not in current_path and "configNumber" not in current_path:
|
||||
holdonPD = None
|
||||
# Increase change tracking number
|
||||
pdchange += 1
|
||||
# Enable this for more logs
|
||||
|
||||
try:
|
||||
if debug == "all":
|
||||
|
||||
|
||||
events.append(
|
||||
GetDateTimeString()+
|
||||
"\t[VER."+(str(config_ver) if config_ver != -1 else "Detecting...")+
|
||||
"]\t[CHANGE]\t"+"\""+current_path+"\"\n")
|
||||
|
||||
events_json.append(create_map(
|
||||
events_action="CHANGE",
|
||||
log="",
|
||||
additional=[
|
||||
{
|
||||
"version": config_ver,
|
||||
"fullpath": current_path,
|
||||
}
|
||||
]
|
||||
))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if debug == "debug":
|
||||
print(events[len(events) - 1])
|
||||
except:
|
||||
pass
|
||||
original[key] = value
|
||||
|
||||
else:
|
||||
events_json.append(create_map(
|
||||
events_action="INSERT+",
|
||||
log="NewKeyValue",
|
||||
additional=[{
|
||||
"version": config_ver,
|
||||
"pd": holdonPD,
|
||||
"pd": holdonPD if holdonPD != None else current_path,
|
||||
"new_value": value,
|
||||
"fullpath": current_path
|
||||
}]
|
||||
))
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+(holdonPD if holdonPD != None else current_path)+"\" @ "+str(current_path)+("\t\t\t")+"\n")
|
||||
try:
|
||||
if debug == "debug":
|
||||
print("Add path --> "+path, " | master: "," dev: ", str(value))
|
||||
|
|
@ -374,6 +750,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
|
|||
pass
|
||||
global pdadd
|
||||
pdadd += 1
|
||||
|
||||
original[key] = value
|
||||
|
||||
# Merge list - called by `merge_dict`, using when the value is `dict` type
|
||||
|
|
@ -388,7 +765,7 @@ def merge_lists(original, updated, path=""):
|
|||
for i, item in enumerate(updated):
|
||||
current_path = f"{path}.{i}"
|
||||
if isinstance(item, dict):
|
||||
if i < len(original) - 1 and isinstance(original[i], dict):
|
||||
if i <= len(original)-1 and isinstance(original[i], dict):
|
||||
# events.append("Merge dictionary: "+current_path)
|
||||
if path == "Recipe01":
|
||||
j = 0
|
||||
|
|
@ -404,8 +781,18 @@ def merge_lists(original, updated, path=""):
|
|||
shared_master_position = j - i
|
||||
|
||||
# print("Found target index: ", j," (master) ",i," (dev) " ," check matched? ", original[j][key] == item[key], " use key: ", key, " path: ", current_path )
|
||||
merge_dicts(original[j], item, current_path)
|
||||
merge_dicts(original[j], item, current_path)
|
||||
elif path == "MaterialSetting":
|
||||
k = 0
|
||||
key = "id"
|
||||
|
||||
while original[k][key] != item[key] and k < len(original) - 1:
|
||||
k += 1
|
||||
|
||||
shared_master_position = k - i
|
||||
merge_dicts(original[k], item, current_path)
|
||||
else:
|
||||
# found what
|
||||
merge_dicts(original[i], item, current_path)
|
||||
elif item not in original:
|
||||
try:
|
||||
|
|
@ -418,11 +805,19 @@ def merge_lists(original, updated, path=""):
|
|||
log="AppendDict",
|
||||
additional=[{
|
||||
"version": config_ver,
|
||||
"pd": fetch_pd(fetch_onlyMainMenuPath(current_path), master_json),
|
||||
"pd": fetch_pd(current_path, dev_json) if "Topping" not in current_path else current_path,
|
||||
"fullpath": current_path
|
||||
}]
|
||||
))
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
|
||||
events.append(
|
||||
GetDateTimeString()+
|
||||
"\t[VER."+str(config_ver)+
|
||||
"]\t\t\t[INSERT]\t"+
|
||||
"\tNew value >>> "+
|
||||
(fetch_pd(fetch_onlyMainMenuPath(current_path), dev_json) if "Topping" not in current_path else current_path)
|
||||
+("@"+current_path if "Topping" not in current_path else "")+
|
||||
(" -> "+fetch_pd(current_path, dev_json) if fetch_pd(current_path, dev_json) != None else "")+
|
||||
("\t\t\t")+"\n")
|
||||
global pdadd
|
||||
pdadd += 1
|
||||
original.append(item)
|
||||
|
|
@ -441,10 +836,11 @@ def merge_lists(original, updated, path=""):
|
|||
"fullpath": current_path
|
||||
}]
|
||||
))
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
|
||||
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+str(fetch_pd(fetch_onlyMainMenuPath(current_path), master_json))+", "+current_path+("\t\t\t")+"\n")
|
||||
pdadd += 1
|
||||
original.append(item)
|
||||
|
||||
|
||||
def main():
|
||||
command_line = sys.argv[1]
|
||||
print(sys.argv)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue