2023-09-19 17:19:09 +07:00
import datetime
2023-09-20 13:35:36 +07:00
from functools import reduce
2023-09-19 17:19:09 +07:00
import json
2023-09-20 13:35:36 +07:00
from operator import getitem
2023-09-19 17:19:09 +07:00
import sys
2023-09-20 13:35:36 +07:00
import os
import itertools
2023-09-19 17:19:09 +07:00
2023-09-20 13:35:36 +07:00
# /home/pakin/Codes/coffCfg/cofffeemachineConfig/coffeethai02_1550.json
2023-09-19 17:19:09 +07:00
def GetDateTimeString ( ) :
2023-09-20 13:35:36 +07:00
now = datetime . datetime . now ( ) # current date and time
2023-09-19 17:19:09 +07:00
date_time = now . strftime ( " %d - % b- % Y, % H: % M: % S " )
return date_time
2023-09-20 13:35:36 +07:00
HomeDirectory = " /home/pakin "
CoffeeRecipeDirectory = " /home/pakin/Codes/coffCfg/cofffeemachineConfig "
valid_len_product_code = len ( " 12-01-02-0001 " )
2023-09-19 17:19:09 +07:00
# events - save any action done by merge
events = [ ]
2023-09-21 11:04:44 +07:00
events_json = [ ]
2023-09-19 17:19:09 +07:00
# last_change - current value of "LastChange"
last_change = " "
# last_change_path - key mapping to "LastChange" key, joined by '.' and could be splitted to access value in nested map
last_change_path = " "
# master_json - main dictionary(map) for another json to merge into.
master_json = { }
# config_ver - target main dictionary's "configNumber"
global config_ver
config_ver = - 1
# holdonPD = the current product code that has been holding
holdonPD = " "
2023-09-21 08:59:57 +07:00
# output config
global isJson
isJson = False
2023-09-19 17:19:09 +07:00
# Set value of nested map (target - master_json) by using keys from 'key_list' and assigned value to it.
def set_value_in_nested_map ( key_list : list , value ) :
reduce ( getitem , key_list [ : - 1 ] , master_json ) [ key_list [ - 1 ] ] = value
def get_value_in_nested_map ( target_json : dict , key_list : list , isMaster = False ) :
if " Recipe01 " in key_list :
if isMaster :
key_list [ 1 ] + = shared_master_position
if " SubMenu " in key_list :
if len ( reduce ( getitem , key_list [ : 3 ] , target_json ) ) < = 0 :
return " - "
return reduce ( getitem , key_list [ : - 1 ] , target_json ) [ key_list [ - 1 ] ]
# Decode key that was in form of String to the list
def decode_path ( str_with_dot : str ) - > list :
path_map = str_with_dot . split ( ' . ' )
keylist = [ ]
for keyi in path_map :
if keyi . isdigit ( ) :
keyi = int ( keyi )
elif keyi [ 1 : ] . isdigit ( ) :
pass
keylist . append ( keyi )
return keylist
def fetch_pd ( str_path : str , target_dict : dict ) - > str :
keyList = decode_path ( str_with_dot = str_path )
keyList . append ( " productCode " )
# print("decode and append get : ",keyList)
return get_value_in_nested_map ( target_json = target_dict , key_list = keyList )
def fetch_matId ( str_path : str , target_dict : dict ) - > str :
keyList = decode_path ( str_with_dot = str_path )
keyList . append ( " materialPathId " )
# print(keyList)
return get_value_in_nested_map ( target_json = target_dict , key_list = keyList )
def fetch_defaultId ( str_path : str , target_dict : dict ) - > str :
keyList = decode_path ( str_with_dot = str_path )
keyList . append ( " defaultIDSelect " )
# print(keyList)
return get_value_in_nested_map ( target_json = target_dict , key_list = keyList )
def fetch_onlyMainMenuPath ( str_with_dot : str ) :
mainpath = decode_path ( str_with_dot ) [ : 2 ]
# '.'.join(mainpath)
return " . " . join ( str ( p ) for p in mainpath )
2023-09-21 08:59:57 +07:00
def create_map ( events_action : str , log : str , additional = [ ] ) - > str :
return {
" timestamp " : GetDateTimeString ( ) ,
" event " : events_action ,
" log " : log ,
" addition " : additional
}
2023-09-19 17:19:09 +07:00
# Merge diff value or append it to the main file.
# 0 - master_path = master.json / to be merged
# 1 - dev_path = dev.json / to merge into another
# 2 - outfile = output
# 3 - changefile = track log
# 4 - debug = enable debug
def merge ( args ) :
2023-09-22 09:54:07 +07:00
"""
Merge the 2 json files together with input of target paths , output path and changefile
: param args : A list of arguments . list [ ]
: type args : list
where each element position ;
0 - master_path = master . json / to be merged
1 - dev_path = dev . json / to merge into another
2 - outfile = merged json output
3 - changefile = track log ( json ) - - - generate by merge to html , log , json
4 - debug = enable debug
5 - requester = requester ' s name sent by client
: return : None
: rtype : None
"""
2023-09-19 17:19:09 +07:00
print ( " ARGS: => " , args )
master_path = args [ 0 ] ; dev_path = args [ 1 ] ; outfile_path = args [ 2 ] ; changefile_path = args [ 3 ] ;
2023-09-21 08:59:57 +07:00
file_path , out_ext = os . path . splitext ( changefile_path )
if " json " in out_ext :
global isJson
isJson = True
2023-09-19 17:19:09 +07:00
if len ( args ) > 4 :
global debug
debug = args [ 4 ] if args [ 4 ] != None else False
2023-09-21 13:52:00 +07:00
if len ( args ) > 5 :
global requester
requester = args [ 5 ] if args [ 5 ] != None else " "
2023-09-19 17:19:09 +07:00
#
if ( os . path . exists ( master_path ) == False ) and str ( master_path ) . isdigit ( ) :
master_path = CoffeeRecipeDirectory + " /coffeethai02_ " + str ( master_path ) + " .json "
master_file = open ( master_path , ' rb ' )
if ( os . path . exists ( dev_path ) == False ) and str ( dev_path ) . isdigit ( ) :
dev_path = CoffeeRecipeDirectory + " /coffeethai02_ " + str ( dev_path ) + " .json "
dev_file = open ( dev_path , ' rb ' )
masterName = master_file . name ; devName = dev_file . name
master_file = master_file . raw . readall ( )
dev_file = dev_file . raw . readall ( )
print ( " Master file size => " , os . stat ( master_path ) . st_size )
print ( " Dev file size => " , os . stat ( dev_path ) . st_size )
#
2023-09-21 13:52:00 +07:00
# Requester
events_json . append ( create_map (
events_action = " REQUESTER " ,
log = " GetRequestFrom " ,
additional = [ {
" timestamp " : GetDateTimeString ( ) ,
" requester " : requester
} ] ) )
events . append ( GetDateTimeString ( ) + " \t [REQUEST] \t \t get request from \" " + requester + " \" \n " )
2023-09-21 11:04:44 +07:00
events_json . append ( create_map ( events_action = " MERGE " , log = devName . split ( " / " ) [ - 1 ] + " into " + masterName . split ( " / " ) [ - 1 ] ) )
events . append ( GetDateTimeString ( ) + " \t [MERGE] \t \t Merging " + devName . split ( " / " ) [ - 1 ] + " into " + masterName . split ( " / " ) [ - 1 ] + " \n " )
2023-09-19 17:19:09 +07:00
# print(GetDateTimeString()+"\tMerging "+dev_file.name.split("/")[-1]+" into "+master_file.name.split("/")[-1]+"\n")
print ( events [ len ( events ) - 1 ] )
# print("Test maintain => ", MaintianUnicodeEscapeDecoder.decoder(s=master_file))
global master_json
global dev_json
master_json_file : dict = json . loads ( master_file )
master_json = master_json_file . copy ( )
dev_json_file : dict = json . loads ( dev_file )
dev_json = dev_json_file . copy ( )
config_ver = master_json [ " MachineSetting " ] [ " configNumber " ]
global pdchange
pdchange = 0
global pdadd
pdadd = 0
# global holdonPD
holdonPD = " "
merge_dicts ( master_json , dev_json_file )
# print(master_json["MachineSetting"]["configNumber"])
try :
if debug == " debug " :
print ( " / " . join ( changefile_path . split ( " / " ) [ : - 1 ] ) )
except :
pass
if ( os . path . exists ( " / " . join ( outfile_path . split ( " / " ) [ : - 1 ] ) ) == False ) :
os . makedirs ( " / " . join ( outfile_path . split ( " / " ) [ : - 1 ] ) )
if ( os . path . exists ( " / " . join ( changefile_path . split ( " / " ) [ : - 1 ] ) ) == False ) :
os . makedirs ( " / " . join ( changefile_path . split ( " / " ) [ : - 1 ] ) )
with open ( outfile_path , " w+ " , encoding = " utf-8 " ) as outfile :
json . dump ( master_json , outfile , indent = 2 , ensure_ascii = False )
# Include counts
2023-09-21 11:04:44 +07:00
events_json . append ( create_map ( events_action = " COUNT " , log = " Total= " + str ( pdchange ) ) )
events_json . append ( create_map ( events_action = " COUNT " , log = " Total= " + str ( pdadd ) ) )
events_json . append ( create_map ( events_action = " OUTPUT " , log = " Finished! write output to " + outfile_path ) )
events_json . append ( create_map ( events_action = " LOG " , log = " Log is saved to " + changefile_path ) )
events . append ( GetDateTimeString ( ) + " \t [COUNT] \t \t Total Change: " + str ( pdchange ) + " \n " )
events . append ( GetDateTimeString ( ) + " \t [COUNT] \t \t Total Insert: " + str ( pdadd ) + " \n " )
2023-09-21 08:59:57 +07:00
2023-09-21 11:04:44 +07:00
events . append ( GetDateTimeString ( ) + " \t [OUTPUT] \t \t Finished! write output to " + outfile_path + " \n " )
events . append ( GetDateTimeString ( ) + " \t [LOG] \t \t Log is saved to " + changefile_path + " \n " )
2023-09-20 13:35:36 +07:00
2023-09-21 11:04:44 +07:00
# log json file
2023-09-21 13:52:00 +07:00
if not os . path . exists ( file_path + " .json " ) or os . stat ( file_path + " .json " ) . st_size == 0 :
2023-09-21 11:04:44 +07:00
with open ( file_path + " .json " , " w+ " , encoding = " utf-8 " ) as outlogfile :
json . dump ( { " logs " + GetDateTimeString ( ) + " * " : events_json } , outlogfile , indent = 2 , ensure_ascii = False )
2023-09-21 08:59:57 +07:00
else :
2023-09-21 11:04:44 +07:00
print ( file_path + " .json " )
logjs : dict = json . loads ( open ( file_path + " .json " , encoding = " utf-8 " ) . read ( ) )
logjs [ " logs " + GetDateTimeString ( ) + " * " ] = events_json
json . dump ( logjs , open ( file_path + " .json " , " w+ " , encoding = " utf-8 " ) , indent = 2 , ensure_ascii = False )
# log file
with open ( file_path + " .log " , " a+ " ) as outlogfile2 :
try :
2023-09-21 08:59:57 +07:00
for event in events :
2023-09-21 11:04:44 +07:00
outlogfile2 . write ( event )
except :
raise Exception ( event )
2023-09-21 08:59:57 +07:00
2023-09-20 13:35:36 +07:00
# Create html version
2023-09-21 11:04:44 +07:00
with open ( file_path + " .html " , " a+ " ) as outlogHtml :
2023-09-20 13:35:36 +07:00
for event in events :
# Create div
# print("Log as list: ",str(event).split("\t"))
2023-09-22 09:54:07 +07:00
html_string = " \t <div class= \" flex p-4 bg-stone-300 rounded m-2 \" > \n "
2023-09-20 13:35:36 +07:00
event_fraction = str ( event ) . split ( " \t " )
for i in event_fraction :
if i != " " and i != " \n " and i != " --- " :
if " | " in i and not i . endswith ( " | " ) :
# CHANGE
spl_text = i . split ( " | " )
html_string + = " \t \t <p> " + spl_text [ 0 ] + " </p> \n "
html_string + = " \t \t <p> " + spl_text [ 1 ] . replace ( " \n " , " " ) + " </p> \n "
elif " >>> " in i :
# INSERT
spl_text = i . split ( " >>> " )
html_string + = " \t \t <p> " + spl_text [ 0 ] + " </p> \n "
html_string + = " \t \t <p> " + spl_text [ 1 ] . replace ( " \n " , " " ) + " </p> \n "
elif i . endswith ( " | " ) :
html_string + = " \t \t <p> " + i [ : - 1 ] + " </p> \n "
else :
# print("Default = ", i)
# Either version, status or others
html_string + = " \t \t <p> " + i . replace ( " \n " , " " ) + " </p> \n "
html_string + = " \t </div> \n "
outlogHtml . write ( html_string )
2023-09-21 08:59:57 +07:00
2023-09-20 13:35:36 +07:00
2023-09-19 17:19:09 +07:00
# Merge dictionary - called by `merge`, using when the value is `dict` type
# original - main file to merge/append value into
# updated - source of new value (must be in the same structure)
# path - default = "". This variable is used for saving keys as path for accessing nested map
def merge_dicts ( original : dict , updated : dict , path = " " ) :
for key , value in updated . items ( ) :
current_path = f " { path } . { key } " if path else key
if key in original :
#
if key == " Timestamp " :
global pre_timestamp
pre_timestamp = value
# change timestamp
# original["Timestamp"] = GetDateTimeString()
cfgnum = " "
# events.append(GetDateTimeString()+"\t[TIMESTMP]\tLast Generated: "+value+cfgnum+"\tNew Generated at "+original["Timestamp"]+" \n")
# print(events[len(events) - 1])
else :
if key == " LastChange " :
global last_change
global last_change_path
last_change = value
last_change_path = current_path
# print("[LastChange] LastChange: ",last_change_path, " value = ",value)
if isinstance ( value , dict ) and isinstance ( original [ key ] , dict ) :
merge_dicts ( original [ key ] , value , current_path )
elif isinstance ( value , list ) and isinstance ( original [ key ] , list ) :
merge_lists ( original [ key ] , value , current_path )
else :
# Detect diff
if original [ key ] != value :
if key == " configNumber " :
# use master version
global config_ver
config_ver = master_json [ " MachineSetting " ] [ " configNumber " ]
# original[key] = config_ver
cfgnum = " \t [VER. " + str ( config_ver ) + " ] " if config_ver != - 1 else " \t [...] "
try :
if debug == " debug " :
print ( " Config number " , config_ver )
except :
pass
2023-09-21 08:59:57 +07:00
2023-09-21 11:04:44 +07:00
events_json . append (
create_map ( events_action = " MERGE_TARGETS " , log = " Found `configNumber` => master version " + str ( config_ver ) + " , merged with " + str ( value ) ) )
events . append ( GetDateTimeString ( ) + cfgnum + " \t Found `configNumber` => master version " + str ( config_ver ) + " , merged with " + str ( value ) + " \n " )
2023-09-19 17:19:09 +07:00
else :
lc = last_change if last_change != " " else pre_timestamp
try :
if debug == " debug " :
2023-09-20 13:35:36 +07:00
print ( " Encounter path --> " + current_path , " | master: " , original [ key ] , " dev: " , value )
2023-09-19 17:19:09 +07:00
except :
pass
if " Recipe01 " in current_path and not " recipes " in current_path :
global holdonPD
global pdchange
if " productCode " in original and holdonPD != original [ " productCode " ] :
holdonPD = original [ " productCode " ]
pdchange + = 1
2023-09-21 11:04:44 +07:00
events_json . append ( create_map (
2023-09-21 08:59:57 +07:00
events_action = " CHANGE " ,
log = " " ,
additional = [
{
" version " : config_ver ,
" master_last_change " : ( get_value_in_nested_map ( master_json , decode_path ( last_change_path ) , isMaster = True ) ) ,
" dev_last_change " : last_change ,
" pd " : holdonPD ,
" name " : original [ " name " ] ,
" fullpath " : current_path
}
]
) )
2023-09-21 11:04:44 +07:00
events . append ( GetDateTimeString ( ) + " \t [VER. " + str ( config_ver ) + " ] [CHANGE] \t " + " LastChange: " + ( get_value_in_nested_map ( master_json , decode_path ( last_change_path ) , isMaster = True ) ) + " (master) | \t " + last_change + " (dev) \t " + " --- \t \" " + holdonPD + " \" \n " )
2023-09-19 17:19:09 +07:00
# elif "MaterialSetting" in current_path:
# events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n")
# override original value by value from updated(dev)
original [ key ] = value
else :
2023-09-21 11:04:44 +07:00
events_json . append ( create_map (
2023-09-21 08:59:57 +07:00
events_action = " INSERT+ " ,
log = " NewKeyValue " ,
additional = [ {
" version " : config_ver ,
" pd " : holdonPD ,
" new_value " : value ,
" fullpath " : current_path
} ]
) )
2023-09-21 11:04:44 +07:00
events . append ( GetDateTimeString ( ) + " \t [VER. " + str ( config_ver ) + " ] \t \t \t [INSERT] \t " + " \t New key & value >>> \" " + holdonPD + " \" " + str ( value ) + ( " \t \t \t " ) + " \n " )
2023-09-19 17:19:09 +07:00
try :
if debug == " debug " :
print ( " Add path --> " + path , " | master: " , " dev: " , str ( value ) )
except :
pass
global pdadd
pdadd + = 1
original [ key ] = value
# Merge list - called by `merge_dict`, using when the value is `dict` type
# original - main file to merge/append value into
# updated - source of new value (must be in the same structure)
# path - default = "". This variable is used for saving keys as path for accessing nested map
#
# Update v2: Fix bug where index matched but replacing unrelated
# Update v2.1: Fix unrelated & new log format
def merge_lists ( original , updated , path = " " ) :
for i , item in enumerate ( updated ) :
current_path = f " { path } . { i } "
if isinstance ( item , dict ) :
if i < len ( original ) - 1 and isinstance ( original [ i ] , dict ) :
# events.append("Merge dictionary: "+current_path)
if path == " Recipe01 " :
j = 0
if " productCode " not in original [ i ] . keys ( ) :
key = " name "
else :
key = " productCode "
while original [ j ] [ key ] != item [ " productCode " ] and j < len ( original ) - 1 :
j + = 1
# override index; share index to other functions
global shared_master_position
shared_master_position = j - i
# print("Found target index: ", j," (master) ",i," (dev) " ," check matched? ", original[j][key] == item[key], " use key: ", key, " path: ", current_path )
merge_dicts ( original [ j ] , item , current_path )
else :
merge_dicts ( original [ i ] , item , current_path )
elif item not in original :
try :
if debug == " debug " :
print ( " Append dict@ i= " , i , " path: " , current_path )
except :
pass
2023-09-21 11:04:44 +07:00
events_json . append ( create_map (
2023-09-21 08:59:57 +07:00
events_action = " INSERT " ,
log = " AppendDict " ,
additional = [ {
" version " : config_ver ,
" pd " : fetch_pd ( fetch_onlyMainMenuPath ( current_path ) , master_json ) ,
" fullpath " : current_path
} ]
) )
2023-09-21 11:04:44 +07:00
events . append ( GetDateTimeString ( ) + " \t [VER. " + str ( config_ver ) + " ] \t \t \t [INSERT] \t " + " \t New value >>> " + fetch_pd ( fetch_onlyMainMenuPath ( current_path ) , master_json ) + ( " \t \t \t " ) + " \n " )
2023-09-19 17:19:09 +07:00
global pdadd
pdadd + = 1
original . append ( item )
elif item not in original :
try :
if debug == " debug " :
print ( " Append list@ i= " , i , " path: " , current_path )
except :
pass
2023-09-21 11:04:44 +07:00
events_json . append ( create_map (
2023-09-21 08:59:57 +07:00
events_action = " INSERT " ,
log = " AppendList " ,
additional = [ {
" version " : config_ver ,
" pd " : fetch_pd ( fetch_onlyMainMenuPath ( current_path ) , master_json ) ,
" fullpath " : current_path
} ]
) )
2023-09-21 11:04:44 +07:00
events . append ( GetDateTimeString ( ) + " \t [VER. " + str ( config_ver ) + " ] \t \t \t [INSERT] \t " + " \t New value >>> " + fetch_pd ( fetch_onlyMainMenuPath ( current_path ) , master_json ) + ( " \t \t \t " ) + " \n " )
2023-09-19 17:19:09 +07:00
pdadd + = 1
original . append ( item )
def main ( ) :
command_line = sys . argv [ 1 ]
2023-09-20 13:35:36 +07:00
print ( sys . argv )
2023-09-19 17:19:09 +07:00
if command_line == " merge " :
merge ( sys . argv [ 2 : ] )
2023-09-20 13:35:36 +07:00
if __name__ == " __main__ " :
main ( )