Add git submodule & py api (not yet added to path)

parent eb12adfa82
commit 984707c7bf

4 changed files with 379 additions and 0 deletions
server/.gitignore (vendored, new file, +1)
@@ -0,0 +1 @@
/cofffeemachineConfig
server/.gitmodules (vendored, new file, +4)
@@ -0,0 +1,4 @@
[submodule "cofffeemachineConfig"]
	path = cofffeemachineConfig
	url = ssh://ikong@192.168.10.159/1TBHDD/ikong/repo/cofffeemachineConfig
	branch = master
(fourth changed file; name not shown in this view)
@@ -8,3 +8,7 @@ git clone --depth 1 --no-checkout --filter=blob:none ssh://ikong@192.168.10.159/
cd cofffeemachineConfig
git checkout master -- coffeethai02_577.json
cd ..

# Submodule
git submodule add -b master ssh://ikong@192.168.10.159/1TBHDD/ikong/repo/cofffeemachineConfig
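After cloning the server repo, the submodule contents are fetched with the standard command (a usage note; the path assumes the default from .gitmodules above):

git submodule update --init cofffeemachineConfig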
server/python_api/merge_recipe.py (new file, +370)
@@ -0,0 +1,370 @@
#!/usr/bin/env python3

from functools import reduce
from operator import getitem
import os
import os.path
import sys
import json
from datetime import datetime

from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

import google.auth
import UnifyGen


HomeDirectory = "/Users/wanloprungsiyangkul/"
CoffeeRecipeDirectory = "/Users/wanloprungsiyangkul/cofffeemachineConfig"
valid_len_product_code = len("12-01-02-0001")

# Convert an Excel-style column label to its 1-based index.
def xl2int(s):
    s = s.strip().upper()
    return sum((ord(c) - ord('A') + 1) * 26**i
               for i, c in enumerate(reversed(s)))


# Return the current date and time as a display string,
# formatted "%d-%b-%Y, %H:%M:%S".
def GetDateTimeString():
    now = datetime.now()  # current date and time
    date_time = now.strftime("%d-%b-%Y, %H:%M:%S")
    return date_time

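# Worked examples for xl2int (derived from the formula above):
#   xl2int("A") == 1, xl2int("Z") == 26, xl2int("AB") == 28
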
# Load a recipe JSON, refresh its timestamp, and update menu names from a
# TSV export; the per-line update logic is still a stub (see the commented
# block inside the loop).
def ImportFileAndUpdateMenuName(recipe_version, menu_name_tsv, output_file):
    file_name_recipe = CoffeeRecipeDirectory + "/coffeethai02_" + str(recipe_version) + ".json"
    print("file_name_recipe = " + file_name_recipe)

    with open(file_name_recipe, 'r') as JSON:
        jdatclass = json.load(JSON)

    print("configNumber = " + str(jdatclass["MachineSetting"]["configNumber"]))
    jdatclass["Timestamp"] = GetDateTimeString()

    count = 0
    with open(menu_name_tsv) as file_tsv:  # open the requested TSV file
        for line in file_tsv.readlines():
            count += 1
            sp = line.split('\t')

            # for rp in jdatclass["Recipe01"]:
            #     if rp["productCode"] ==

    with open(output_file, "w") as outfile_js:
        json.dump(jdatclass, outfile_js, indent=4, ensure_ascii=False)


# Module-level state shared by the merge helpers below.

# events - log of every action taken by the merge
events = []
# last_change - current value of "LastChange"
last_change = ""
# last_change_path - key path to the "LastChange" key, joined by '.'; it can
# be split again to access the value in the nested map
last_change_path = ""
# master_json - main dictionary (map) that the other JSON is merged into
master_json = {}
# config_ver - the main dictionary's "configNumber"
config_ver = -1
# holdonPD - the product code currently being processed
holdonPD = ""
# debug - set to "debug" via merge()'s optional fifth argument to enable
# verbose output
debug = False


# Set a value in the nested map (target: master_json) by walking the keys in
# `key_list` and assigning `value` at the final key.
def set_value_in_nested_map(key_list: list, value):
    reduce(getitem, key_list[:-1], master_json)[key_list[-1]] = value

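# Usage sketch (illustrative key path), equivalent to
# master_json["MachineSetting"]["configNumber"] = 577:
#   set_value_in_nested_map(["MachineSetting", "configNumber"], 577)
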
# Get a value from the nested map `target_json` by walking the keys in
# `key_list`. The `isMaster` flag applies the index offset shared by
# merge_lists, for the case where the master and dev lists differ in length.
def get_value_in_nested_map(target_json: dict, key_list: list, isMaster=False):
    if "Recipe01" in key_list:
        if isMaster:
            key_list[1] += shared_master_position

    if "SubMenu" in key_list:
        if len(reduce(getitem, key_list[:3], target_json)) <= 0:
            return "-"

    return reduce(getitem, key_list[:-1], target_json)[key_list[-1]]


# Decode a dotted-string key path into a list of keys, converting purely
# numeric segments into int indices.
def decode_path(str_with_dot: str) -> list:
    path_map = str_with_dot.split('.')
    keylist = []
    for keyi in path_map:
        if keyi.isdigit():
            keyi = int(keyi)
        elif keyi[1:].isdigit():
            # segments like "-1" are kept as strings
            pass
        keylist.append(keyi)
    return keylist

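# Example (path format as used below, e.g. "Recipe01.8.recipes.0"):
#   decode_path("Recipe01.8.recipes.0") -> ["Recipe01", 8, "recipes", 0]
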
# Fetch the product code at the given path in `target_dict`.
def fetch_pd(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("productCode")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)


# Fetch the material id at the given path in `target_dict`.
def fetch_matId(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("materialPathId")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)


# Fetch the default id at the given path in `target_dict`.
def fetch_defaultId(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("defaultIDSelect")
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)


# Keep only the main-menu part of a path; e.g. Recipe01.8.recipes.0 --> Recipe01.8
def fetch_onlyMainMenuPath(str_with_dot: str):
    mainpath = decode_path(str_with_dot)[:2]
    return ".".join(str(p) for p in mainpath)

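# Usage sketch (hypothetical index):
#   fetch_pd("Recipe01.8", master_json) resolves to
#   master_json["Recipe01"][8]["productCode"], and
#   fetch_onlyMainMenuPath("Recipe01.8.recipes.0") returns "Recipe01.8".
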
# Merge differing values from a dev recipe file into the master file, or
# append values that only exist in dev.
#   args[0] - master_path     = master.json (merged into)
#   args[1] - dev_path        = dev.json (merged from)
#   args[2] - outfile_path    = output file
#   args[3] - changefile_path = change-tracking log
#   args[4] - debug           = pass "debug" to enable debug output (optional)
def merge(args):
    print("ARGS: => ", args)

    master_path = args[0]
    dev_path = args[1]
    outfile_path = args[2]
    changefile_path = args[3]
    if len(args) > 4:
        global debug
        debug = args[4] if args[4] is not None else False

    # A bare version number is resolved to a file in CoffeeRecipeDirectory.
    if not os.path.exists(master_path) and str(master_path).isdigit():
        master_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(master_path) + ".json"

    if not os.path.exists(dev_path) and str(dev_path).isdigit():
        dev_path = CoffeeRecipeDirectory + "/coffeethai02_" + str(dev_path) + ".json"

    with open(master_path, 'rb') as f:
        masterName = f.name
        master_file = f.read()

    with open(dev_path, 'rb') as f:
        devName = f.name
        dev_file = f.read()

    print("Master file size => ", os.stat(master_path).st_size)
    print("Dev file size => ", os.stat(dev_path).st_size)

    events.append(GetDateTimeString() + "\t[MERGE]\t\tMerging " + devName.split("/")[-1] + " into " + masterName.split("/")[-1] + "\n")
    print(events[len(events) - 1])

    global master_json
    global dev_json
    master_json_file: dict = json.loads(master_file)
    master_json = master_json_file.copy()
    dev_json_file: dict = json.loads(dev_file)
    dev_json = dev_json_file.copy()

    global config_ver
    config_ver = master_json["MachineSetting"]["configNumber"]

    global pdchange
    pdchange = 0
    global pdadd
    pdadd = 0
    global holdonPD
    holdonPD = ""
    merge_dicts(master_json, dev_json_file)

    if debug == "debug":
        print("/".join(changefile_path.split("/")[:-1]))

    outdir = "/".join(outfile_path.split("/")[:-1])
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    logdir = "/".join(changefile_path.split("/")[:-1])
    if logdir and not os.path.exists(logdir):
        os.makedirs(logdir)

    with open(outfile_path, "w+", encoding="utf-8") as outfile:
        json.dump(master_json, outfile, indent=2, ensure_ascii=False)

    # Include counts
    events.append(GetDateTimeString() + "\t[COUNT]\t\tTotal Change: " + str(pdchange) + "\n")
    events.append(GetDateTimeString() + "\t[COUNT]\t\tTotal Insert: " + str(pdadd) + "\n")

    events.append(GetDateTimeString() + "\t[OUTPUT]\t\tFinished! write output to " + outfile_path + "\n")
    events.append(GetDateTimeString() + "\t[LOG]\t\tLog is saved to " + changefile_path + "\n")
    with open(changefile_path, "a+") as outlogfile:
        for event in events:
            outlogfile.write(event)

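# Usage sketch (hypothetical version numbers; "577" matches the checked-in
# coffeethai02_577.json above, "576" is illustrative):
#   merge(["576", "577", "out/merged.json", "out/changes.log", "debug"])
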
# Merge a dictionary - called by `merge`, and recursively whenever a value is
# itself of `dict` type.
#   original - main dict to merge/append values into
#   updated  - source of the new values (must have the same structure)
#   path     - default "": the dotted key path used to address the nested map
def merge_dicts(original: dict, updated: dict, path=""):
    for key, value in updated.items():
        current_path = f"{path}.{key}" if path else key

        if key in original:
            if key == "Timestamp":
                global pre_timestamp
                pre_timestamp = value
                # change timestamp
                # original["Timestamp"] = GetDateTimeString()
                cfgnum = ""
            else:
                if key == "LastChange":
                    global last_change
                    global last_change_path
                    last_change = value
                    last_change_path = current_path
                if isinstance(value, dict) and isinstance(original[key], dict):
                    merge_dicts(original[key], value, current_path)
                elif isinstance(value, list) and isinstance(original[key], list):
                    merge_lists(original[key], value, current_path)
                else:
                    # Detect a differing value
                    if original[key] != value:
                        if key == "configNumber":
                            # use the master version
                            global config_ver
                            config_ver = master_json["MachineSetting"]["configNumber"]
                            cfgnum = "\t[VER." + str(config_ver) + "]" if config_ver != -1 else "\t[...]"
                            if debug == "debug":
                                print("Config number ", config_ver)
                            events.append(GetDateTimeString() + cfgnum + "\tFound `configNumber` => master version " + str(config_ver) + ", merged with " + str(value) + " \n")
                        else:
                            if debug == "debug":
                                print("Encounter path --> " + path, " | master: ", original[key], " dev: ", value)
                            if "Recipe01" in current_path and "recipes" not in current_path:
                                global holdonPD
                                global pdchange
                                if "productCode" in original and holdonPD != original["productCode"]:
                                    holdonPD = original["productCode"]
                                    pdchange += 1

                                events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "] [CHANGE]\t" + " LastChange: " + get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True) + " (master) |\t" + last_change + " (dev)\t" + "---\t \"" + holdonPD + "\" | \"" + str(original["name"]).replace("\n", "\\n") + "\"\n")
                            # override the original value with the value from updated (dev)
                            original[key] = value
        else:
            events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t\t\t[INSERT]\t" + "\tNew key & value >>> \"" + holdonPD + " \"" + str(value) + "\t\t\t" + "\n")
            if debug == "debug":
                print("Add path --> " + path, " | master: ", " dev: ", str(value))
            global pdadd
            pdadd += 1
            original[key] = value

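# Semantics illustration (simplified; merge() must initialise the module
# globals such as pdadd before merge_dicts runs on real data):
#   master = {"a": 1, "c": 3}
#   dev    = {"a": 1, "b": 9}
#   merge_dicts(master, dev)   # -> master == {"a": 1, "c": 3, "b": 9}
# Equal values are left alone, differing values are overwritten from dev,
# and keys missing from master are inserted (counted in pdadd).
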
# Merge a list - called by `merge_dicts` when the value is of `list` type.
#   original - main list to merge/append values into
#   updated  - source of the new values (must have the same structure)
#   path     - default "": the dotted key path used to address the nested map
#
# Update v2: Fix bug where the index matched but an unrelated entry was replaced
# Update v2.1: Fix unrelated entries & new log format
def merge_lists(original, updated, path=""):

    for i, item in enumerate(updated):
        current_path = f"{path}.{i}"
        if isinstance(item, dict):
            if i < len(original) and isinstance(original[i], dict):
                if path == "Recipe01":
                    # Recipes are matched by productCode (or name as a
                    # fallback), not by index, since the lists can differ.
                    j = 0
                    if "productCode" not in original[i].keys():
                        key = "name"
                    else:
                        key = "productCode"
                    while original[j][key] != item[key] and j < len(original) - 1:
                        j += 1

                    # override index; share the offset with other functions
                    global shared_master_position
                    shared_master_position = j - i

                    merge_dicts(original[j], item, current_path)
                else:
                    merge_dicts(original[i], item, current_path)
            elif item not in original:
                if debug == "debug":
                    print("Append dict @ i=", i, " path: ", current_path)
                events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t\t\t[INSERT]\t" + "\tNew value >>> " + fetch_pd(fetch_onlyMainMenuPath(current_path), master_json) + "\t\t\t" + "\n")
                global pdadd
                pdadd += 1
                original.append(item)
        elif item not in original:
            if debug == "debug":
                print("Append list @ i=", i, " path: ", current_path)
            events.append(GetDateTimeString() + "\t[VER." + str(config_ver) + "]\t\t\t[INSERT]\t" + "\tNew value >>> " + fetch_pd(fetch_onlyMainMenuPath(current_path), master_json) + "\t\t\t" + "\n")
            pdadd += 1
            original.append(item)

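# Matching illustration (hypothetical product codes):
#   original (master) Recipe01 = [{"productCode": "A"}, {"productCode": "B"}]
#   updated  (dev)    Recipe01 = [{"productCode": "B"}]
# The while-loop pairs dev index 0 with master index 1, so
# shared_master_position = 1 - 0 = 1, which get_value_in_nested_map later
# uses to address the matching master entry.
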
def main():
    command_line = sys.argv[1]
    if command_line == "name":
        UnifyGen.GetFileSheet("menu-name", ".menu-name.tsv")
        ImportFileAndUpdateMenuName(529, ".menu-name.tsv", CoffeeRecipeDirectory + "/coffeethai02_530.json")
    if command_line == "merge":
        merge(sys.argv[2:])


if __name__ == "__main__":
    main()


# ./import_price.py ~/cofffeemachineConfig/profile_MYR/profile_MYR_1.json ~/Downloads/MYS\ Taobin\ Menu\ with\ price.xlsx\ -\ 28_Mar_2023.tsv F ~/cofffeemachineConfig/profile_MYR/profile_MYR_1_3.json
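# Command-line usage sketch (hypothetical arguments; bare version numbers are
# resolved against CoffeeRecipeDirectory):
#   ./merge_recipe.py name
#   ./merge_recipe.py merge <master.json|version> <dev.json|version> <out.json> <changes.log> [debug]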