2023-09-19 17:19:09 +07:00
import datetime
2023-09-20 13:35:36 +07:00
from functools import reduce
2023-09-19 17:19:09 +07:00
import json
2023-09-20 13:35:36 +07:00
from operator import getitem
2023-09-19 17:19:09 +07:00
import sys
2023-09-20 13:35:36 +07:00
import os
import itertools
2023-09-19 17:19:09 +07:00
2023-09-20 13:35:36 +07:00
# /home/pakin/Codes/coffCfg/cofffeemachineConfig/coffeethai02_1550.json
2023-09-19 17:19:09 +07:00
def GetDateTimeString():
    """Return the current local time formatted like "19-Sep-2023, 17:19:09".

    This format must stay in sync with string_to_date(), which parses it back.
    """
    now = datetime.datetime.now()  # current date and time
    return now.strftime("%d-%b-%Y, %H:%M:%S")
2023-09-20 13:35:36 +07:00
# Working directories for the coffee-machine recipe configs.
HomeDirectory = "/home/pakin"
CoffeeRecipeDirectory = "/home/pakin/Codes/coffCfg/cofffeemachineConfig"
# Reference length of a well-formed product code, e.g. "12-01-02-0001".
valid_len_product_code = len("12-01-02-0001")

# events - save any action done by merge (plain-text log lines)
events = []
# events_json - structured log entries (see create_map)
events_json = []
# last_change - current value of "LastChange"
last_change = ""
# last_change_path - key path to the "LastChange" key, joined by '.'; can be
# split to access the value in the nested map
last_change_path = ""
# master_json - main dictionary (map) for another json to merge into
master_json = {}
# config_ver - target main dictionary's "configNumber"; -1 = not detected yet.
# (The original module-level `global config_ver` statement was a no-op and is dropped.)
config_ver = -1
# holdonPD - the product code currently being held/tracked by the merge
holdonPD = ""
# isJson - True when the changefile output is a .json file
# (module-level `global isJson` was likewise a no-op)
isJson = False
# Keys to be removed from the master config before merging
removed_keylist = [
    "RemainingCups",
    "RemainingCupsWithTopping",
]
2023-09-19 17:19:09 +07:00
# Set a value inside the nested map `master_json`, walking down `key_list`
# and assigning `value` at the final key.
def set_value_in_nested_map(key_list: list, value):
    """Assign `value` at the position addressed by `key_list` in master_json."""
    node = master_json
    for step in key_list[:-1]:
        node = node[step]
    node[key_list[-1]] = value
def get_value_in_nested_map(target_json: dict, key_list: list, isMaster=False):
    """Fetch a value from a nested dict/list by a key path.

    :param target_json: nested structure to read from
    :param key_list: path of dict keys / list indices
    :param isMaster: when True and the path goes through "Recipe01", shift the
        list index by the global `shared_master_position` so the dev-side index
        lines up with the matching master-side entry (set by merge_lists).
    :return: the addressed value, "-" for an empty SubMenu, or None when the
        path does not resolve.

    NOTE(review): mutates key_list in place in the Recipe01/isMaster case.
    """
    if "Recipe01" in key_list:
        if isMaster:
            key_list[1] += shared_master_position
    if "SubMenu" in key_list:
        # An empty sub-menu list is reported as a dash placeholder.
        if len(reduce(getitem, key_list[:3], target_json)) <= 0:
            return "-"
    # Handle the missing-key case: log and return None (narrowed from bare except).
    result = None
    try:
        result = reduce(getitem, key_list[:-1], target_json)[key_list[-1]]
    except (KeyError, IndexError, TypeError):
        print(key_list, " -> ", result)
    return result
2023-09-19 17:19:09 +07:00
# Decode a key that was in the form of a dotted string into a list of keys.
def decode_path(str_with_dot: str) -> list:
    """Split a dotted path into keys; digit-only segments become int list indices.

    Segments like "-1" are NOT converted (the original had a dead
    `elif keyi[1:].isdigit(): pass` branch for them; behavior is preserved,
    the dead branch is removed).
    """
    keylist = []
    for token in str_with_dot.split('.'):
        keylist.append(int(token) if token.isdigit() else token)
    return keylist
def fetch_pd(str_path: str, target_dict: dict) -> str:
    """Return the product identifier under str_path.

    Recipe01 paths carry "productCode"; everything else is looked up by "id".
    """
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("productCode" if "Recipe01" in str_path else "id")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)
def fetch_matId(str_path: str, target_dict: dict) -> str:
    """Return the "materialPathId" value stored under str_path."""
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("materialPathId")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)
def fetch_defaultId(str_path: str, target_dict: dict) -> str:
    """Return the "defaultIDSelect" value stored under str_path."""
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("defaultIDSelect")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)
def fetch_onlyMainMenuPath(str_with_dot: str):
    """Reduce a full dotted path to its first two segments (the main-menu entry)."""
    mainpath = decode_path(str_with_dot)[:2]
    # int indices must be stringified again before re-joining
    return ".".join(str(p) for p in mainpath)
2023-09-21 08:59:57 +07:00
def create_map(events_action: str, log: str, additional=None) -> dict:
    """Build one structured log entry for events_json.

    Fixes: mutable default argument `additional=[]` replaced with the
    None-sentinel idiom; return annotation corrected from `str` to `dict`
    (the function returns a dict).
    """
    return {
        "timestamp": GetDateTimeString(),
        "event": events_action,
        "log": log,
        "addition": additional if additional is not None else []
    }
2023-10-03 10:18:11 +07:00
def flatten(x, parent='', sep='.') -> dict:
    """Flatten nested dicts/lists into one dict keyed by dotted paths.

    List elements are addressed by their numeric index. A list nested
    directly inside a list is stored as-is (not recursed) — preserved
    from the original implementation.
    """
    flattened = {}
    for key, value in x.items():
        current_key = f"{parent}{sep}{key}" if parent else key
        if isinstance(value, dict):
            flattened.update(flatten(value, current_key, sep))
        elif isinstance(value, list):
            for index, item in enumerate(value):
                list_key = f"{current_key}{sep}{index}"
                if isinstance(item, dict):
                    flattened.update(flatten(item, list_key, sep))
                else:
                    flattened[list_key] = item
        else:
            flattened[current_key] = value
    return flattened
def unflatten(dictionary):
    """Inverse of flatten(): rebuild nested dicts/lists from dotted-path keys.

    Digit path segments become list indices; missing list slots are padded
    with None until the index exists.

    Fix: the original decided dict-vs-list from the CURRENT segment only, so a
    non-digit segment followed by a digit one created a dict, stored a list
    under an int key, and then crashed calling .append on the dict. The child
    container is now chosen by looking ahead at the NEXT segment.
    """
    result_dict = {}
    for key, value in dictionary.items():
        parts = key.split('.')
        node = result_dict
        for i, part in enumerate(parts[:-1]):
            # The next segment decides what container must live here.
            child = [] if parts[i + 1].isdigit() else {}
            if part.isdigit() and isinstance(node, list):
                idx = int(part)
                while len(node) <= idx:
                    node.append(None)
                if node[idx] is None:
                    node[idx] = child
                node = node[idx]
            else:
                k = int(part) if part.isdigit() else part
                if k not in node:
                    node[k] = child
                node = node[k]
        last = parts[-1]
        if last.isdigit() and isinstance(node, list):
            idx = int(last)
            while len(node) <= idx:
                node.append(None)
            node[idx] = value
        else:
            node[int(last) if last.isdigit() else last] = value
    return result_dict
def diff(dict1, dict2):
    """Three-way diff of two flat dicts.

    Returns (only-in-dict1, only-in-dict2, {key: (v1, v2)}) where the third
    map covers shared keys whose values differ, in sorted key order.
    """
    keys1, keys2 = set(dict1.keys()), set(dict2.keys())
    only_in_1 = keys1 - keys2
    only_in_2 = keys2 - keys1
    changed = sorted(k for k in keys1 & keys2 if dict1[k] != dict2[k])
    return (
        {k: dict1[k] for k in only_in_1},
        {k: dict2[k] for k in only_in_2},
        {k: (dict1[k], dict2[k]) for k in changed},
    )
def get_key(d, target) -> str:
    """Return the first key in d whose value equals target, or None."""
    return next((k for k, v in d.items() if v == target), None)
def remove_key(d, key):
    """Recursively delete every entry named `key` from a nested structure, in place.

    Dicts: a matching entry is removed; non-matching values are recursed into.
    Lists: only top-level dict items are cleaned, plus one level under their
    "SubMenu" lists. NOTE(review): deeper nesting inside list items is not
    visited — preserved from the original; confirm this is intended.
    """
    if isinstance(d, dict):
        for k in list(d.keys()):
            if k == key:
                del d[k]
            else:
                remove_key(d[k], key)
    elif isinstance(d, list):
        for item in d:
            if isinstance(item, dict):
                if key in item:
                    del item[key]
                if "SubMenu" in item:
                    for sub_item in item["SubMenu"]:
                        if key in sub_item:
                            del sub_item[key]
def string_to_date(str_date: str):
    """Parse a timestamp string and return its time-of-day component.

    Accepts "%d-%b-%Y, %H:%M:%S" (comma form, as emitted by
    GetDateTimeString) or "%d-%b-%Y %H:%M:%S".

    NOTE(review): the return value is datetime.time, so the DATE part is
    discarded before CompareDate runs — timestamps on different days compare
    by clock time only. Confirm this is intended.
    """
    fmt = "%d-%b-%Y, %H:%M:%S" if "," in str_date else "%d-%b-%Y %H:%M:%S"
    return datetime.datetime.strptime(str_date, fmt).time()
def CompareDate(master_lastchange, dev_lastchange) -> str:
    """Classify the dev timestamp relative to the master's.

    :return: "DevLegacy" (dev older), "NoChange" (equal), "DevRelease" (dev newer)
    """
    if master_lastchange > dev_lastchange:
        return "DevLegacy"
    if master_lastchange == dev_lastchange:
        return "NoChange"
    return "DevRelease"
def GetMasterDevCompare(key_list: list, dev_date) -> bool:
    """Return True when the dev-side "LastChange" is newer than the master's.

    NOTE(review): despite the annotation, `key_list` is actually a dotted path
    STRING (it is passed through decode_path); annotation kept for interface
    compatibility. `dev_date` is a timestamp string parseable by string_to_date.
    """
    master_date = string_to_date(get_value_in_nested_map(master_json, decode_path(key_list), True))
    comp = CompareDate(master_date, string_to_date(dev_date))
    # "DevLegacy" and "NoChange" both mean: do not take the dev value.
    return comp == "DevRelease"
# ------------------------------------ HTML --------------------------------------
def blinking_text():
    """Return a <style> block defining the .blink fade animation used by the HTML log."""
    return """
<style>
.blink {
  animation: blink-animation .25s linear infinite alternate;
  -webkit-animation: blink-animation .25s linear infinite alternate;
}
@keyframes blink-animation {
  0% { opacity: 100% }
  25% { opacity: 75% }
  50% { opacity: 50% }
  75% { opacity: 25% }
  100% { opacity: 0% }
}
@-webkit-keyframes blink-animation {
  0% { opacity: 100% }
  25% { opacity: 75% }
  50% { opacity: 50% }
  75% { opacity: 25% }
  100% { opacity: 0% }
}
</style>
"""
def p(content: str, isAction: bool) -> str:
    """Render one log fragment as an inline-styled <p> element.

    :param content: text of the fragment; keywords inside it select the color
    :param isAction: True for action markers (CHANGE/INSERT), which get the
        highlighted action backgrounds
    """
    pclass = ""
    style = "style=\"float: left; margin: 15; padding: 10px; "
    if isAction:
        if "CHANGE" in content:
            style += "background: greenyellow; "
        elif "INSERT" in content:
            style += "background: orange; "
    else:
        if "VER" in content:
            style += "background: aliceblue; "
        elif "COUNT" in content or "OUTPUT" in content or "LOG" in content:
            style += "background: aquamarine; "
        elif "master" in content or "dev" in content:
            style += "box-shadow: 2px 2px; "
            if "master" in content:
                style += "background: lightgray; margin-right: -20px; "
            else:
                style += "background: lightblue; "
        elif "New key" in content or "New value" in content:
            style += "border-radius: 25px; "
            pclass = "class=\"blink\" "  # new entries blink (see blinking_text)
            if "New key" in content:
                style += "background: lightgreen; "
            else:
                style += "background: gold; "
    style += "\">"
    return "\t\t<p " + pclass + style + content + "</p>\n"
def searchFnForHtml():
    """Return the <script> implementing the client-side log search box filter."""
    return """
<script>
function searchLog() {
  var input, filter, logBody, logRow, logP, logContent;
  input = document.getElementById("searchInput");
  filter = input.value;
  logBody = document.getElementById("logBody");
  logRow = logBody.getElementsByTagName("div");
  for (let i = 0; i < logRow.length; i++) {
    p = logRow[i].getElementsByTagName("p");
    console.log("Total P = ", logRow[i].getElementsByTagName("p").length);
    for (let p_i = 0; p_i < p.length; p_i++) {
      pcontent = p[p_i].textContent || p[p_i].innerText;
      if (pcontent.includes(filter)) {
        logRow[i].style.display = "";
        break;
      } else {
        logRow[i].style.display = "none";
      }
    }
  }
}
</script>
"""
def searchInputForHTML():
    """Return the fixed-position search <input> wired to searchLog()."""
    return """
<input type="text" id="searchInput" onkeyup="searchLog()" placeholder="Search logs..." style="top: 0; position: fixed; width: 100%; font-size: 16px; padding: 20px; margin: 10px; border: 1px solid">
"""
def mergeV3pre(flat_dict_base: dict, flat_dict_dev: dict) -> dict:
    """
    `version 3 preview`
    Merge two flattened dictionaries; dev values win on key collisions.
    `Warning` This will overwrite the existing key with the new value.
    """
    return {**flat_dict_base, **flat_dict_dev}
2023-09-19 17:19:09 +07:00
# Merge diff values or append them to the main file, then write the merged
# config plus .json/.log/.html change logs.
def merge(args):
    """
    `version 2`
    Merge the 2 json files together with input of target paths, output path and changefile.

    :param args: list of positional arguments:
        0 - master_path = master.json / to be merged
        1 - dev_path    = dev.json / to merge into another
        2 - outfile     = merged json output
        3 - changefile  = track log (json) --- generates html, log, json
        4 - debug       = enable debug ("debug" or "all")
        5 - requester   = requester's name sent by client
    :return: None

    Fixes vs original: file handles are closed via context managers;
    `debug`/`requester` are always initialized (the original raised NameError
    when the optional arguments were omitted); "burlywoord" color typo fixed.
    """
    print("ARGS: => ", args)
    master_path = args[0]
    dev_path = args[1]
    outfile_path = args[2]
    changefile_path = args[3]

    file_path, out_ext = os.path.splitext(changefile_path)
    if "json" in out_ext:
        global isJson
        isJson = True

    # Default the optional flags so later references never raise NameError.
    global debug
    debug = False
    global requester
    requester = ""
    if len(args) > 4:
        debug = args[4] if args[4] is not None else False
    if len(args) > 5:
        requester = args[5] if args[5] is not None else ""

    # Allow passing just a config number instead of a full path.
    if not os.path.exists(master_path) and str(master_path).isdigit():
        if "/coffeethai02" not in master_path:
            master_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(master_path) + ".json"
    if not os.path.exists(dev_path) and str(dev_path).isdigit():
        if "/coffeethai02" not in dev_path:
            dev_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(dev_path) + ".json"

    # Read both configs (context managers close the handles; original leaked them).
    with open(master_path, 'rb') as mf:
        masterName = mf.name
        master_file = mf.read()
    with open(dev_path, 'rb') as df:
        devName = df.name
        dev_file = df.read()
    print("Master file size => ", os.stat(master_path).st_size)
    print("Dev file size => ", os.stat(dev_path).st_size)

    # Requester
    events_json.append(create_map(
        events_action="REQUESTER",
        log="GetRequestFrom",
        additional=[{
            "timestamp": GetDateTimeString(),
            "requester": requester
        }]))
    events.append(GetDateTimeString() + "\t[REQUEST]\t\tget request from \"" + requester + "\"\n")

    events_json.append(create_map(events_action="MERGE", log=devName.split("/")[-1] + " into " + masterName.split("/")[-1]))
    events.append(GetDateTimeString() + "\t[MERGE]\t\tMerging " + devName.split("/")[-1] + " into " + masterName.split("/")[-1] + "\n")
    print(events[len(events) - 1])

    global master_json
    global dev_json
    master_json_file: dict = json.loads(master_file)
    master_json = master_json_file.copy()
    dev_json_file: dict = json.loads(dev_file)
    dev_json = dev_json_file.copy()
    config_ver = master_json["MachineSetting"]["configNumber"]
    print(config_ver)

    global pdchange
    pdchange = 0
    global pdadd
    pdadd = 0
    holdonPD = ""

    # Step 1: Flatten both configs.
    flattened_master = flatten(master_json)
    flattened_dev = flatten(dev_json)
    # Step 2: Check key sizes of the flattened maps (debug only).
    if debug == "debug":
        print("master keys = ", len(flattened_master.keys()), ", dev keys = ", len(flattened_dev.keys()))
    # Step 3: Diff (currently informational only).
    diff1, diff2, diffvals = diff(flattened_master, flattened_dev)

    # Clean unused keys before merging.
    for remove_item in removed_keylist:
        remove_key(master_json, remove_item)

    merge_dicts(master_json, dev_json_file)

    if debug == "debug":
        print("/".join(changefile_path.split("/")[:-1]))
    if not os.path.exists("/".join(outfile_path.split("/")[:-1])):
        os.makedirs("/".join(outfile_path.split("/")[:-1]))
    if not os.path.exists("/".join(changefile_path.split("/")[:-1])):
        os.makedirs("/".join(changefile_path.split("/")[:-1]))

    with open(outfile_path, "w", encoding="utf-8") as outfile:
        json.dump(master_json, outfile, indent=2, ensure_ascii=False)

    # Derive the log-file version suffix from the output name: ..._<version>.<ext>
    outfile_path, outfile_ext = os.path.splitext(outfile_path)
    outfile_path_spl = str(outfile_path).split("/")
    path_version = outfile_path_spl[len(outfile_path_spl) - 1]
    pv = path_version.split("_")[1]

    # Include counts.
    events_json.append(create_map(events_action="COUNT", log="Total=" + str(pdchange)))
    events_json.append(create_map(events_action="COUNT", log="Total=" + str(pdadd)))
    events_json.append(create_map(events_action="OUTPUT", log="Finished! write output to " + outfile_path))
    events_json.append(create_map(events_action="LOG", log="Log is saved to " + file_path + "_" + pv + ".json"))
    events.append(GetDateTimeString() + "\t[COUNT]\t\tTotal Change: " + str(pdchange) + "\n")
    events.append(GetDateTimeString() + "\t[COUNT]\t\tTotal Insert: " + str(pdadd) + "\n")
    events.append(GetDateTimeString() + "\t[OUTPUT]\t\tFinished! write output to " + outfile_path + "\n")
    events.append(GetDateTimeString() + "\t[LOG]\t\tLog is saved to " + file_path + "_" + pv + ".json" + "\n")

    # Log json file: create fresh, or append a new dated "logs ...*" section.
    log_json_path = file_path + "_" + pv + ".json"
    if not os.path.exists(log_json_path) or os.stat(log_json_path).st_size == 0:
        with open(log_json_path, "w", encoding="utf-8") as outlogfile:
            json.dump({"logs " + GetDateTimeString() + "*": events_json}, outlogfile, indent=2, ensure_ascii=False)
    else:
        print(log_json_path)
        with open(log_json_path, encoding="utf-8") as f:
            logjs: dict = json.loads(f.read())
        logjs["logs " + GetDateTimeString() + "*"] = events_json
        with open(log_json_path, "w+", encoding="utf-8") as f:
            json.dump(logjs, f, indent=2, ensure_ascii=False)

    # Plain-text log file (append).
    with open(file_path + "_" + pv + ".log", "a+") as outlogfile2:
        for event in events:
            outlogfile2.write(event)

    # Create html version; the style/script header goes in only on first write.
    html_string = ""
    if not os.path.exists(file_path + "_" + pv + ".html"):
        html_string = blinking_text() + searchFnForHtml() + "\n" + searchInputForHTML() + "\n"
    with open(file_path + "_" + pv + ".html", "a+") as outlogHtml:
        html_string += "<div id=\"logBody\" style=\"display: flex; flex-direction: column; margin-top: 100\">\n"
        for event in events:
            # One row <div> per event, colored by its action tag.
            html_string += "\t<div id=\"logRow\" style=\"display: inline-block; "
            if "REQUEST" in event:
                html_string += "background: burlywood\">\n"  # fix: was "burlywoord"
            elif "INSERT" in event:
                html_string += "background: darkkhaki\">\n"
            elif "CHANGE" in event:
                html_string += "background: beige\">\n"
            else:
                html_string += "background: antiquewhite\">\n"
            event_fraction = str(event).split("\t")
            for frag in event_fraction:
                if frag != "" and frag != "\n" and frag != "---":
                    if "|" in frag and not frag.endswith("|"):
                        # CHANGE: "master | dev" pair
                        spl_text = frag.split("|")
                        html_string += p(spl_text[0], False)
                        html_string += p(spl_text[1].replace("\n", ""), False)
                    elif ">>>" in frag:
                        # INSERT: "label >>> value"
                        spl_text = frag.split(">>>")
                        html_string += p(spl_text[0], False)
                        html_string += p(spl_text[1].replace("\n", ""), False)
                    elif frag.endswith("|"):
                        # Last change fragment; strip the trailing pipe
                        html_string += p(frag[:-1], False)
                    else:
                        # Either version, status or others
                        if "CHANGE" in frag or "INSERT" in frag:
                            html_string += p(frag.replace("\n", ""), True)
                        else:
                            html_string += p(frag.replace("\n", ""), False)
            html_string += "\t</div>\n"
        html_string += "</div>\n"
        outlogHtml.write(html_string)
2023-09-20 13:35:36 +07:00
2023-09-21 08:59:57 +07:00
2023-09-20 13:35:36 +07:00
2023-09-19 17:19:09 +07:00
# Merge dictionary - called by `merge`, used when the value is `dict` type.
# original - main map to merge/append values into
# updated  - source of new values (must have the same structure)
# path     - default "". Dotted key path used to address values in the nested map.
def merge_dicts(original: dict, updated: dict, path=""):
    """Recursively merge `updated` (dev) into `original` (master), logging changes.

    Relies on module globals set up by merge(): events, events_json,
    master_json, config_ver, last_change, last_change_path, holdonPD,
    pdchange, pdadd, and the optional `debug` flag.
    The bare excepts guarding debug prints are narrowed to NameError
    (they exist because `debug` may be undefined).
    """
    for key, value in updated.items():
        current_path = f"{path}.{key}" if path else key
        if key in original:
            if key == "Timestamp":
                # Remember the previous generation timestamp for later logging.
                global pre_timestamp
                pre_timestamp = value
                cfgnum = ""  # kept from original; unused in this branch
            else:
                if key == "LastChange":
                    # Track the most recent dev-side "LastChange" and its path.
                    global last_change
                    global last_change_path
                    last_change = value
                    last_change_path = current_path

                if isinstance(value, dict) and isinstance(original[key], dict):
                    merge_dicts(original[key], value, current_path)
                elif isinstance(value, list) and isinstance(original[key], list):
                    merge_lists(original[key], value, current_path)
                else:
                    # Leaf value: act only when the sides differ AND the dev
                    # "LastChange" is newer.
                    if original[key] != value:
                        if key == "configNumber":
                            # Always keep the master's version number.
                            global config_ver
                            config_ver = master_json["MachineSetting"]["configNumber"]
                            cfgnum = "\t[VER." + str(config_ver) + "]" if config_ver != -1 else "\t[...]"
                            try:
                                if debug == "debug":
                                    print("Config number ", config_ver)
                            except NameError:
                                pass
                            events_json.append(
                                create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version " + str(config_ver) + ", merged with " + str(value)))
                            events.append(GetDateTimeString() + cfgnum + "\tFound `configNumber` => master version " + str(config_ver) + ", merged with " + str(value) + "\n")
                        elif last_change_path != "" and GetMasterDevCompare(last_change_path, last_change):
                            # Dev side is newer at this path.
                            lc = last_change if last_change != "" else pre_timestamp  # kept; unused
                            try:
                                if debug == "debug":
                                    print("Encounter path --> " + current_path, " | master: ", original[key], " dev: ", value)
                            except NameError:
                                pass
                            if "Recipe01" in current_path and "recipes" not in current_path:
                                global holdonPD
                                global pdchange
                                # Count each changed product only once.
                                if "productCode" in original and holdonPD != original["productCode"]:
                                    holdonPD = original["productCode"]
                                    pdchange += 1
                                    events_json.append(create_map(
                                        events_action="CHANGE",
                                        log="",
                                        additional=[
                                            {
                                                "version": config_ver,
                                                "master_last_change": get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True),
                                                "dev_last_change": last_change,
                                                "pd": holdonPD,
                                                "name": original["name"] if "name" in original else "",
                                                "fullpath": current_path
                                            }
                                        ]
                                    ))
                                    events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t[CHANGE]\t" + "LastChange: " + get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True) + " (master) |\t" + last_change + " (dev)\t" + "---\t\"" + holdonPD + "\"\n")
                            # Override original value with the value from updated (dev).
                            original[key] = value
                        elif "Recipe01" not in current_path and "Topping" not in current_path and "configNumber" not in current_path:
                            holdonPD = None
                            # Increase the change-tracking counter.
                            pdchange += 1
                            # Enable debug == "all" for more logs.
                            try:
                                if debug == "all":
                                    events.append(
                                        GetDateTimeString() +
                                        "\t[VER." + (str(config_ver) if config_ver != -1 else "Detecting...") +
                                        "]\t[CHANGE]\t" + "\"" + current_path + "\"\n")
                                    events_json.append(create_map(
                                        events_action="CHANGE",
                                        log="",
                                        additional=[
                                            {
                                                "version": config_ver,
                                                "fullpath": current_path,
                                            }
                                        ]
                                    ))
                            except NameError:
                                pass
                            try:
                                if debug == "debug":
                                    print(events[len(events) - 1])
                            except NameError:
                                pass
                            original[key] = value
        else:
            # Key absent from master: insert it and log the new key/value.
            events_json.append(create_map(
                events_action="INSERT+",
                log="NewKeyValue",
                additional=[{
                    "version": config_ver,
                    "pd": holdonPD if holdonPD is not None else current_path,
                    "new_value": value,
                    "fullpath": current_path
                }]
            ))
            events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t\t\t[INSERT]\t" + "\tNew key & value >>> \"" + (holdonPD if holdonPD is not None else current_path) + "\" @ " + str(current_path) + "\t\t\t" + "\n")
            try:
                if debug == "debug":
                    print("Add path --> " + path, " | master: ", "dev: ", str(value))
            except NameError:
                pass
            global pdadd
            pdadd += 1
            original[key] = value
# Merge list - called by `merge_dicts`, used when the value is `list` type.
# original - main list to merge/append values into
# updated  - source of new values (must have the same structure)
# path     - default "". Dotted key path used to address values in the nested map.
#
# Update v2: Fix bug where index matched but replacing unrelated
# Update v2.1: Fix unrelated & new log format
def merge_lists(original, updated, path=""):
    """Merge list `updated` into `original`, matching Recipe01 entries by
    productCode (or name) and MaterialSetting entries by id instead of
    trusting raw list positions. Unmatched items are appended and logged."""
    for i, item in enumerate(updated):
        current_path = f"{path}.{i}"
        if isinstance(item, dict):
            if i <= len(original) - 1 and isinstance(original[i], dict):
                if path == "Recipe01":
                    # Find the master entry with the same product code
                    # (fall back to "name" when the master entry has no code).
                    j = 0
                    if "productCode" not in original[i].keys():
                        key = "name"
                    else:
                        key = "productCode"
                    while original[j][key] != item["productCode"] and j < len(original) - 1:
                        j += 1
                    # Share the index offset with get_value_in_nested_map().
                    global shared_master_position
                    shared_master_position = j - i
                    merge_dicts(original[j], item, current_path)
                elif path == "MaterialSetting":
                    # Materials are matched by their "id" field.
                    k = 0
                    key = "id"
                    while original[k][key] != item[key] and k < len(original) - 1:
                        k += 1
                    shared_master_position = k - i
                    merge_dicts(original[k], item, current_path)
                else:
                    merge_dicts(original[i], item, current_path)
            elif item not in original:
                # New dict item: append it to the master list.
                try:
                    if debug == "debug":
                        print("Append dict@ i=", i, " path: ", current_path)
                except NameError:
                    pass
                events_json.append(create_map(
                    events_action="INSERT",
                    log="AppendDict",
                    additional=[{
                        "version": config_ver,
                        "pd": fetch_pd(current_path, dev_json) if "Topping" not in current_path else current_path,
                        "fullpath": current_path
                    }]
                ))
                events.append(
                    GetDateTimeString() +
                    "\t[VER." + str(config_ver) +
                    "]\t\t\t[INSERT]\t" +
                    "\tNew value >>> " +
                    (fetch_pd(fetch_onlyMainMenuPath(current_path), dev_json) if "Topping" not in current_path else current_path)
                    + (" @ " + current_path if "Topping" not in current_path else "") +
                    (" -> " + fetch_pd(current_path, dev_json) if fetch_pd(current_path, dev_json) is not None else "") +
                    "\t\t\t" + "\n")
                global pdadd
                pdadd += 1
                original.append(item)
        elif item not in original:
            # New scalar/list item: append it to the master list.
            try:
                if debug == "debug":
                    print("Append list@ i=", i, " path: ", current_path)
            except NameError:
                pass
            events_json.append(create_map(
                events_action="INSERT",
                log="AppendList",
                additional=[{
                    "version": config_ver,
                    "pd": fetch_pd(fetch_onlyMainMenuPath(current_path), master_json),
                    "fullpath": current_path
                }]
            ))
            events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t\t\t[INSERT]\t" + "\tNew value >>> " + str(fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)) + ", " + current_path + "\t\t\t" + "\n")
            pdadd += 1
            original.append(item)
2023-10-03 10:18:11 +07:00
2023-10-05 11:26:49 +07:00
# Diff recipe = interactive diff
# diff_recipe(master, "v1-v2-v3")
# {"key_diff": [master_value, dev_value]}
def diff_recipe(args, **kwargs):
    """`version 1`

    Diff one master config against one or more versions.

    :param args: [master_version, "v1-v2-..."] plus optional "debug" at args[2]
    :param kwargs: flatten=true|false (flat-key diff), out=false (print to
        console instead of writing diff files)

    Fixes vs original: the non-flat console branch passed the banner strings
    INTO diff() (TypeError — diff takes two dicts); args[2] was read without a
    length check (IndexError); file handles are now closed; the bare except
    reports the error instead of hiding it.
    """
    master_path = args[0]
    versions_to_diff = args[1]
    vs = versions_to_diff.split("-")
    default_dir = "../server/cofffeemachineConfig/"
    default_name = "coffeethai02_"
    with open(default_dir + default_name + master_path + ".json", "r", encoding="utf-8") as f:
        master_json_diff = json.loads(f.read())

    # Flag settings
    debug_on = len(args) > 2 and args[2] == "debug"  # guard against short args
    is_flat = None
    is_nooutput = None
    for key, value in kwargs.items():
        if key == "flatten":
            is_flat = str(value).lower() == "true"
            if debug_on:
                print("Flatten: ", is_flat)
        elif key == "out":
            is_nooutput = str(value).lower() == "false"
            if debug_on:
                print("No output: ", is_nooutput)

    results = []
    if is_nooutput:
        print("Guide: {key: (master, dev)}")
    try:
        for v in vs:
            with open(default_dir + default_name + v + ".json", "r", encoding="utf-8") as f:
                other_json = json.loads(f.read())
            if is_nooutput:
                # Console-only mode: print the changed-values map.
                if is_flat:
                    print("\n----------------------", master_path + "-" + v, "----------------------\n",
                          diff(flatten(master_json_diff), flatten(other_json))[2], "\n")
                else:
                    # Fix: banner strings now go to print(), not into diff().
                    print("\n----------------------", master_path + "-" + v, "----------------------\n",
                          diff(master_json_diff, other_json)[2])
                continue
            if is_flat:
                results.append({
                    "diff_between": master_path + "-" + v,
                    "result": diff(flatten(master_json_diff), flatten(other_json))[2]
                })
            else:
                results.append({
                    "diff_between": master_path + "-" + v,
                    "result": diff(master_json_diff, other_json)[2]
                })
    except Exception as exc:
        print("Error diffing file", exc)
    for r in results:
        if debug_on:
            print(r["diff_between"], " | diff len = ", len(r["result"]))
        # Write each pairwise diff to its own file.
        with open(default_dir + "/diff/" + r["diff_between"] + ".json", "w", encoding="utf-8") as f:
            json.dump(r["result"], f, indent=2, ensure_ascii=False)
    if is_nooutput:
        print("Guide: {key: (master, dev)}")
    print("OK")
2023-09-19 17:19:09 +07:00
def main():
    """CLI dispatch: `merge <args...>` or `diff <args...> [key=value ...]`."""
    command_line = sys.argv[1]
    print(sys.argv)
    if command_line == "merge":
        merge(sys.argv[2:])
    elif command_line == "diff":
        # Fix: split on the FIRST '=' only, so values containing '=' survive.
        diff_recipe(sys.argv[2:], **dict(arg.split("=", 1) for arg in sys.argv[5:]))


if __name__ == "__main__":
    main()