Add py api, WIP dl log
This commit is contained in:
parent 35b8b3be57
commit b49b2235db
7 changed files with 159 additions and 111 deletions
@@ -1,78 +1,22 @@
#!/usr/local/bin/python3
#!/usr/bin/python3

from __future__ import print_function
from functools import reduce
from operator import getitem
import os
import threading
import time
import sys
import subprocess
from io import StringIO
import datetime
from functools import reduce
import json
import string
from datetime import datetime


from operator import getitem
import sys
import os.path
import os
import itertools

from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# /home/pakin/Codes/coffCfg/cofffeemachineConfig/coffeethai02_1550.json

import google.auth
import UnifyGen


if sys.version_info[0] >= 3:
    unicode = str

HomeDirectory = "/Users/wanloprungsiyangkul/"
CoffeeRecipeDirectory = "/Users/wanloprungsiyangkul/cofffeemachineConfig"
valid_len_product_code = len("12-01-02-0001")


def xl2int(s):
    s = s.strip().upper()
    return sum((ord(c)-ord('A')+1)*26**i
               for i, c in enumerate(reversed(s)))
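
xl2int reads a spreadsheet-style column label as a base-26 number and returns its 1-based index, presumably so column letters from the exported menu spreadsheet can be mapped onto TSV field positions. A quick sanity check of the formula (illustrative values only):

# Sanity check for xl2int: column label -> 1-based index.
assert xl2int("A") == 1
assert xl2int("Z") == 26
assert xl2int("AA") == 27
assert xl2int("AB") == 28   # 1*26 + 2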

def GetDateTimeString():
    now = datetime.now() # current date and time
    now = datetime.datetime.now() # current date and time
    date_time = now.strftime("%d-%b-%Y, %H:%M:%S")
    return date_time


def ImportFileAndUpdateMenuName( recipe_version, menu_name_tsv, output_file):
    file_name_recipe = CoffeeRecipeDirectory + "/coffeethai02_" + str(recipe_version) + ".json"
    print("file_name_recipe = " + file_name_recipe)
    file_tsv = open( menu_name_tsv) #open requested file

    with open( file_name_recipe, 'r') as JSON:
        jdatclass = json.load(JSON)

    print("configNumber =" + str( jdatclass["MachineSetting"]["configNumber"]))
    jdatclass["Timestamp"] = GetDateTimeString()

    Lines = file_tsv.readlines()
    count = 0
    for line in Lines:
        count += 1
        sp = line.split('\t')

        #for rp in jdatclass["Recipe01"]:
        #    if rp["productCode"] ==

    with open( output_file, "w") as outfile_js:
        json.dump( jdatclass, outfile_js, indent=4, ensure_ascii=False)
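
The loop above is still a stub: `count` is only a row counter and the commented-out block hints at matching TSV rows to Recipe01 entries by productCode. A minimal sketch of how that lookup could be completed, assuming the first TSV column holds the product code and using a hypothetical menuName field for the value being written:

# Hypothetical completion of the commented-out loop: the column layout and the
# menuName field are assumptions for illustration, not taken from this diff.
for line in Lines:
    sp = line.split('\t')
    if len(sp) < 2 or len(sp[0].strip()) != valid_len_product_code:
        continue  # skip header or malformed rows
    for rp in jdatclass["Recipe01"]:
        if rp["productCode"] == sp[0].strip():
            rp["menuName"] = sp[1].strip()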


HomeDirectory = "/home/pakin"
CoffeeRecipeDirectory = "/home/pakin/Codes/coffCfg/cofffeemachineConfig"
valid_len_product_code = len("12-01-02-0001")

# events - save any action done by merge
events = []

@@ -92,8 +36,6 @@ holdonPD = ""
def set_value_in_nested_map(key_list: list, value):
    reduce(getitem, key_list[:-1], master_json)[key_list[-1]] = value

# Get value of nested map by using keys from `key_list`, with additional param `isMaster` for extra padding
# in case of a different length between master and dev.
def get_value_in_nested_map(target_json: dict, key_list: list, isMaster=False):
    if "Recipe01" in key_list:
        if isMaster:
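
set_value_in_nested_map and get_value_in_nested_map both rely on the reduce/getitem idiom: reduce(getitem, key_list[:-1], root) walks down to the parent container, and the last key is then read or assigned on that parent. A self-contained illustration on a toy structure (the sample data here is made up):

from functools import reduce
from operator import getitem

sample = {"MachineSetting": {"configNumber": 530},
          "Recipe01": [{"productCode": "12-01-02-0001"}]}

key_list = ["Recipe01", 0, "productCode"]
parent = reduce(getitem, key_list[:-1], sample)    # sample["Recipe01"][0]
print(parent[key_list[-1]])                        # -> 12-01-02-0001

# The same walk works for assignment, which is what set_value_in_nested_map
# does against the module-level master_json.
reduce(getitem, key_list[:-1], sample)[key_list[-1]] = "12-01-02-0002"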

@@ -117,28 +59,24 @@ def decode_path(str_with_dot: str) -> list:
        keylist.append(keyi)
    return keylist

# Fetch the product code by giving the path and source
def fetch_pd(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("productCode")
    # print("decode and append get : ",keyList)
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)

# Fetch the material id by giving the path and source
def fetch_matId(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("materialPathId")
    # print(keyList)
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)

# Fetch the default id by giving the path and source
def fetch_defaultId(str_path: str, target_dict: dict) -> str:
    keyList = decode_path(str_with_dot=str_path)
    keyList.append("defaultIDSelect")
    # print(keyList)
    return get_value_in_nested_map(target_json=target_dict, key_list=keyList)

# Fetch some parts of the path; Ex. Recipe01.8.recipes.0 --> Recipe01.8
def fetch_onlyMainMenuPath(str_with_dot: str):
    mainpath = decode_path(str_with_dot)[:2]
    # '.'.join(mainpath)
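
decode_path is only partially visible in this hunk, but the example in the comment above suggests it splits a dotted path into keys, presumably converting numeric segments to integers so they can index the Recipe01 list. Under that assumption the helpers compose roughly like this (a sketch, not output from the real functions):

# Assumed behaviour, inferred from the "Recipe01.8.recipes.0 --> Recipe01.8" comment:
#   decode_path("Recipe01.8.recipes.0")             -> ["Recipe01", 8, "recipes", 0]
#   fetch_onlyMainMenuPath("Recipe01.8.recipes.0")  -> keeps only ["Recipe01", 8]
#   fetch_pd("Recipe01.8.recipes.0", dev_json)      -> dev_json["Recipe01"][8]["recipes"][0]["productCode"]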

@@ -219,9 +157,42 @@ def merge(args):

    events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
    events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
    with open(changefile_path, "a+") as outlogfile:
    with open(changefile_path, "a+", encoding="utf-8") as outlogfile:
        for event in events:
            outlogfile.write(event)

    # Create html version
    with open(changefile_path[:-3]+"html", "a+", encoding="utf-8") as outlogHtml:
        for event in events:
            # Create div
            # print("Log as list: ",str(event).split("\t"))
            html_string = "\t<div class=\"flex\">\n"
            event_fraction = str(event).split("\t")
            for i in event_fraction:
                if i != "" and i != "\n" and i != "---":
                    if "|" in i and not i.endswith("|"):
                        # CHANGE
                        spl_text = i.split("|")
                        html_string += "\t\t<p>"+spl_text[0]+"</p>\n"
                        html_string += "\t\t<p>"+spl_text[1].replace("\n","")+"</p>\n"
                    elif ">>>" in i:
                        # INSERT
                        spl_text = i.split(">>>")
                        html_string += "\t\t<p>"+spl_text[0]+"</p>\n"
                        html_string += "\t\t<p>"+spl_text[1].replace("\n","")+"</p>\n"
                    elif i.endswith("|"):
                        html_string += "\t\t<p>"+i[:-1]+"</p>\n"
                    else:
                        # print("Default = ", i)
                        # Either version, status or others
                        html_string += "\t\t<p>"+i.replace("\n","")+"</p>\n"
            html_string += "\t</div>\n"

            outlogHtml.write(html_string)
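
Each entry in events is a tab-separated line; the loop above wraps every non-empty field of a line in a <p> inside a flex div, splitting CHANGE entries on "|" and INSERT entries on ">>>". For an [OUTPUT] event like the one appended at the top of this hunk, the generated markup would look roughly like this (timestamp and path are illustrative):

# Illustration only: one [OUTPUT] event and the div the loop above builds from it.
event = "21-Apr-2023, 10:15:00\t[OUTPUT]\t\tFinished! write output to merged.json\n"
# After splitting on "\t" and dropping empty fields, html_string becomes:
#   <div class="flex">
#       <p>21-Apr-2023, 10:15:00</p>
#       <p>[OUTPUT]</p>
#       <p>Finished! write output to merged.json</p>
#   </div>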


# Merge dictionary - called by `merge`, used when the value is of `dict` type
# original - main file to merge/append value into

@@ -271,7 +242,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
        lc = last_change if last_change != "" else pre_timestamp
        try:
            if debug == "debug":
                print("Encounter path --> "+path, " | master: ",original[key]," dev: ", value)
                print("Encounter path --> "+current_path, " | master: ",original[key]," dev: ", value)
        except:
            pass
        if "Recipe01" in current_path and not "recipes" in current_path:

@@ -348,23 +319,13 @@ def merge_lists(original, updated, path=""):
            pdadd += 1
            original.append(item)



def main():
    command_line = sys.argv[1]
    if command_line == "name":
        UnifyGen.GetFileSheet("menu-name", ".menu-name.tsv")
        ImportFileAndUpdateMenuName( 529, ".menu-name.tsv", CoffeeRecipeDirectory + "/coffeethai02_530.json")
    print(sys.argv)
    if command_line == "merge":
        dev_version = sys.argv[2]
        merge(sys.argv[2:])


if __name__ == "__main__":
    main()


#./import_price.py ~/cofffeemachineConfig/profile_MYR/profile_MYR_1.json ~/Downloads/MYS\ Taobin\ Menu\ with\ price.xlsx\ -\ 28_Mar_2023.tsv F ~/cofffeemachineConfig/profile_MYR/profile_MYR_1_3.json

main()
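
Based on main() above, the script now has two entry points, selected by the first argument; example invocations (the script filename is not shown in this diff, so merge_config.py is a placeholder):

# Example invocations; the script name is a placeholder, not taken from the diff.
#   ./merge_config.py name                 # fetch the menu-name TSV via UnifyGen and rebuild the recipe JSON
#   ./merge_config.py merge <dev_version>  # merge a dev config; everything after "merge" is passed to merge()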