fix merge log json bug

pakintada@gmail.com 2023-09-21 11:04:44 +07:00
parent 6b39392dfb
commit 17ad8486a6
3 changed files with 136 additions and 79 deletions

Changed file 1 of 3: MergeComponent (TypeScript / Angular client)

@@ -46,7 +46,7 @@ export class MergeComponent<T> {
     this.targets.changelog_path = this.mergeForm.value.changelog_path!;
     // TODO: Fetch merge. Modify this to websocket
+    let mergeLogs;
     this.httpClient.post<T>(environment.api+"/merge", {
       master: this.targets.master_version,
       dev: this.targets.dev_version,
@@ -60,16 +60,18 @@ export class MergeComponent<T> {
         if(typeof value === "object" && value !== null){
           if("message" in value){
             // fetch html
-            this.fetchLogsToDisplay("", true);
+            this.fetchLogsToDisplay("", true, false);
             // fetch log file
-            this.fetchLogsToDisplay("", false);
+            this.fetchLogsToDisplay("", false, false);
+            // fetch json
+            mergeLogs = this.fetchLogsToDisplay("", false, true);
           }
         }
       },
     })
   }
-  fetchLogsToDisplay(query: string, isDisplayOnly: boolean){
+  fetchLogsToDisplay(query: string, isDisplayOnly: boolean, requestJson: boolean){
     let additionalParams:string = "?query=";
     if(query != ""){
       additionalParams += query
@@ -77,8 +79,28 @@ export class MergeComponent<T> {
       additionalParams = ""
     }
+    let jsontarget;
+    if(requestJson){
       this.httpClient.post(environment.api+"/dllog"+additionalParams, {
         htmlfile: isDisplayOnly,
+        requestJson: requestJson
+      }, {
+        responseType: 'json',
+        withCredentials: true
+      }).subscribe({
+        next: (value) => {
+          jsontarget = value;
+        },
+        error: (err) => {
+          console.error('Error fetch json: ',err);
+        }
+      });
+      return jsontarget;
+    } else {
+      this.httpClient.post(environment.api+"/dllog"+additionalParams, {
+        htmlfile: isDisplayOnly,
+        requestJson: requestJson
       },{
         responseType: 'blob',
         withCredentials: true,
@@ -105,3 +127,4 @@ export class MergeComponent<T> {
     )
   }
 }
+}
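
One thing worth flagging in the new fetchLogsToDisplay: jsontarget is assigned inside the asynchronous subscribe callback, but `return jsontarget` runs synchronously, so the `mergeLogs = this.fetchLogsToDisplay("", false, true)` call above will always receive undefined. A minimal sketch of one way to restructure it, returning the Observable so the caller subscribes; fetchJsonLogs is a hypothetical name, not part of this commit:

// Hypothetical sketch, not the committed code. Assumes the same Angular
// HttpClient/environment used above, plus: import { Observable } from 'rxjs';
fetchJsonLogs(query: string): Observable<object> {
  const additionalParams = query !== "" ? "?query=" + query : "";
  // Returning the Observable avoids reading jsontarget before subscribe() fires.
  return this.httpClient.post<object>(environment.api + "/dllog" + additionalParams, {
    htmlfile: false,
    requestJson: true
  }, { withCredentials: true });
}

// At the call site, instead of `mergeLogs = this.fetchLogsToDisplay("", false, true)`:
// this.fetchJsonLogs("").subscribe(logs => { /* use the parsed JSON here */ });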

Changed file 2 of 3: Python merge script

@ -20,6 +20,7 @@ valid_len_product_code = len("12-01-02-0001")
# events - save any action done by merge # events - save any action done by merge
events = [] events = []
events_json = []
# last_change - current value of "LastChange" # last_change - current value of "LastChange"
last_change = "" last_change = ""
# last_change_path - key mapping to "LastChange" key, joined by '.' and could be splitted to access value in nested map # last_change_path - key mapping to "LastChange" key, joined by '.' and could be splitted to access value in nested map
@ -134,9 +135,7 @@ def merge(args):
print("Master file size => ",os.stat(master_path).st_size) print("Master file size => ",os.stat(master_path).st_size)
print("Dev file size => ",os.stat(dev_path).st_size) print("Dev file size => ",os.stat(dev_path).st_size)
# #
if isJson: events_json.append(create_map(events_action="MERGE", log=devName.split("/")[-1]+" into "+masterName.split("/")[-1]))
events.append(create_map(events_action="MERGE", log=devName.split("/")[-1]+" into "+masterName.split("/")[-1]))
else:
events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n") events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n")
# print(GetDateTimeString()+"\tMerging "+dev_file.name.split("/")[-1]+" into "+master_file.name.split("/")[-1]+"\n") # print(GetDateTimeString()+"\tMerging "+dev_file.name.split("/")[-1]+" into "+master_file.name.split("/")[-1]+"\n")
print(events[len(events) - 1]) print(events[len(events) - 1])
@ -176,28 +175,38 @@ def merge(args):
# Include counts # Include counts
if isJson: events_json.append(create_map(events_action="COUNT", log="Total="+str(pdchange)))
events.append(create_map(events_action="COUNT", log="Total="+str(pdchange))) events_json.append(create_map(events_action="COUNT", log="Total="+str(pdadd)))
events.append(create_map(events_action="COUNT", log="Total="+str(pdadd))) events_json.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path))
events.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path)) events_json.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
events.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
else:
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n") events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n") events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n") events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n") events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
if isJson: # log json file
with open(changefile_path, "a+", encoding="utf-8") as outlogfile:
json.dump({"logs": events}, outlogfile, indent=2, ensure_ascii=False) if os.stat(file_path+".json").st_size == 0:
with open(file_path+".json", "w+",encoding="utf-8") as outlogfile:
json.dump({"logs"+GetDateTimeString()+"*": events_json}, outlogfile, indent=2, ensure_ascii=False)
else: else:
with open(changefile_path, "a+") as outlogfile: print(file_path+".json")
logjs:dict = json.loads(open(file_path+".json", encoding="utf-8").read())
logjs["logs"+GetDateTimeString()+"*"] = events_json
json.dump(logjs, open(file_path+".json", "w+", encoding="utf-8"), indent=2, ensure_ascii=False)
# log file
with open(file_path+".log", "a+") as outlogfile2:
try:
for event in events: for event in events:
outlogfile.write(event) outlogfile2.write(event)
except:
raise Exception(event)
# Create html version # Create html version
with open(changefile_path[:-3 if not isJson else -4]+"html", "a+") as outlogHtml: with open(file_path+".html", "a+") as outlogHtml:
for event in events: for event in events:
# Create div # Create div
# print("Log as list: ",str(event).split("\t")) # print("Log as list: ",str(event).split("\t"))
@ -273,10 +282,9 @@ def merge_dicts(original:dict, updated:dict, path=""):
except: except:
pass pass
if isJson: events_json.append(
events.append(
create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value))) create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value)))
else:
events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n") events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
else: else:
lc = last_change if last_change != "" else pre_timestamp lc = last_change if last_change != "" else pre_timestamp
@ -291,10 +299,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
if "productCode" in original and holdonPD != original["productCode"]: if "productCode" in original and holdonPD != original["productCode"]:
holdonPD = original["productCode"] holdonPD = original["productCode"]
pdchange += 1 pdchange += 1
events_json.append(create_map(
if isJson:
events.append(
create_map(
events_action="CHANGE", events_action="CHANGE",
log="", log="",
additional=[ additional=[
@ -308,16 +313,14 @@ def merge_dicts(original:dict, updated:dict, path=""):
} }
] ]
)) ))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\" | \""+str(original["name"]).replace("\n","\\n")+"\"\n") events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\"\n")
# elif "MaterialSetting" in current_path: # elif "MaterialSetting" in current_path:
# events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n") # events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n")
# override original value by value from updated(dev) # override original value by value from updated(dev)
original[key] = value original[key] = value
else: else:
if isJson: events_json.append(create_map(
events.append(create_map(
events_action="INSERT+", events_action="INSERT+",
log="NewKeyValue", log="NewKeyValue",
additional=[{ additional=[{
@ -327,7 +330,6 @@ def merge_dicts(original:dict, updated:dict, path=""):
"fullpath": current_path "fullpath": current_path
}] }]
)) ))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n") events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
try: try:
if debug == "debug": if debug == "debug":
@ -375,8 +377,7 @@ def merge_lists(original, updated, path=""):
print("Append dict@ i=",i, " path: ", current_path) print("Append dict@ i=",i, " path: ", current_path)
except: except:
pass pass
if isJson: events_json.append(create_map(
events.append(create_map(
events_action="INSERT", events_action="INSERT",
log="AppendDict", log="AppendDict",
additional=[{ additional=[{
@ -385,7 +386,6 @@ def merge_lists(original, updated, path=""):
"fullpath": current_path "fullpath": current_path
}] }]
)) ))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n") events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
global pdadd global pdadd
pdadd += 1 pdadd += 1
@ -396,8 +396,7 @@ def merge_lists(original, updated, path=""):
print("Append list@ i=",i, " path: ", current_path) print("Append list@ i=",i, " path: ", current_path)
except: except:
pass pass
if isJson: events_json.append(create_map(
events.append(create_map(
events_action="INSERT", events_action="INSERT",
log="AppendList", log="AppendList",
additional=[{ additional=[{
@ -406,7 +405,6 @@ def merge_lists(original, updated, path=""):
"fullpath": current_path "fullpath": current_path
}] }]
)) ))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n") events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
pdadd += 1 pdadd += 1
original.append(item) original.append(item)
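
For reference, the script now writes the JSON log keyed per run: each merge stores its events_json array under "logs" + GetDateTimeString() + "*". Note that os.stat(file_path+".json") raises FileNotFoundError when the file does not exist yet, so the empty-file check assumes the .json file has already been created. Below is a hedged TypeScript sketch of the document shape as the Angular client might type it; create_map's definition is not shown in this diff, so the per-event fields are assumptions read off its call sites:

// Sketch only. MergeLogEvent fields are inferred from the create_map(...)
// calls in this diff (events_action, log, additional), not a confirmed schema.
interface MergeLogEvent {
  events_action?: string;                      // "MERGE", "COUNT", "CHANGE", "INSERT", ...
  log?: string;                                // free-form message
  additional?: Array<Record<string, unknown>>; // e.g. { "fullpath": "..." }
}

// Top level: one event array per merge run, keyed "logs<datetime>*".
type MergeLogFile = Record<string, MergeLogEvent[]>;

// Example consumer: list run keys in order, assuming GetDateTimeString()
// produces strings that sort lexicographically by time.
function listRuns(file: MergeLogFile): string[] {
  return Object.keys(file).filter((k) => k.startsWith("logs")).sort();
}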

Changed file 3 of 3: Go API server (createHandler)

@@ -176,10 +176,11 @@ func (s *Server) createHandler() {
     }
     defer merge_api.Close()
     log.Println("Locate python api", merge_api.Name())
-    cmd := exec.Command(py_exec, merge_api.Name(), "merge", master_path, dev_path, output_path, changelog_path, "debug")
+    cmd := exec.Command(py_exec, merge_api.Name(), "merge", master_path, dev_path, output_path, changelog_path)
     log.Println("Run merge command", cmd)
-    err = cmd.Run()
+    out, err := cmd.CombinedOutput()
+    log.Println(string(out))
     if err != nil {
         log.Fatalln("Merge request failed. Python merge failed: ", err)
     }
@@ -190,8 +191,28 @@ func (s *Server) createHandler() {
     })
-    r.Get("/dllog", func(w http.ResponseWriter, r *http.Request) {
-        changelog_path := "cofffeemachineConfig/changelog/testlog.html"
+    r.Post("/dllog", func(w http.ResponseWriter, r *http.Request) {
+        var postRequest map[string]interface{}
+        err := json.NewDecoder(r.Body).Decode(&postRequest)
+        if err != nil {
+            w.WriteHeader(http.StatusBadRequest)
+            log.Fatalln("Log request failed: ", err)
+            return
+        }
+        file_ext := ".html"
+        if rb, ok := postRequest["htmlfile"].(bool); ok {
+            if rj, ok := postRequest["requestJson"].(bool); ok {
+                if rj {
+                    file_ext = ".json"
+                }
+            } else if !rb {
+                file_ext = ".log"
+            }
+        }
+        changelog_path := "cofffeemachineConfig/changelog/testlog" + file_ext
         logFile, err := os.Open(changelog_path)
         if err != nil {
             http.Error(w, err.Error(), http.StatusInternalServerError)
@@ -199,7 +220,20 @@ func (s *Server) createHandler() {
         defer logFile.Close()
-        w.Header().Set("Content-Disposition", "attachment; filename=logfile.html")
+        if file_ext == ".json" {
+            var logFileJson map[string]interface{}
+            err = json.NewDecoder(logFile).Decode(&logFileJson)
+            if err != nil {
+                log.Fatalf("Error when decode log file: %s", err)
+            }
+            w.Header().Set("Content-Type", "application/json")
+            w.WriteHeader(http.StatusOK)
+            json.NewEncoder(w).Encode(logFileJson)
+            log.Println("Log file: ", changelog_path)
+        } else {
+            w.Header().Set("Content-Disposition", "attachment; filename=logfile"+file_ext)
         w.Header().Set("Content-Type", "application/octet-stream")
         _, err = io.Copy(w, logFile)
@@ -207,6 +241,8 @@ func (s *Server) createHandler() {
             http.Error(w, err.Error(), http.StatusInternalServerError)
             return
         }
+        }
     })
     // Recipe Router
     rr := routers.NewRecipeRouter(database)
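
Taken together, POST /dllog now serves three artifacts from one route, choosing the file extension from the posted flags. Two caveats that follow from the committed logic: a body where requestJson is present but false resolves to .html even when htmlfile is false (the .log branch is reached only when requestJson is absent from the body), and log.Fatalln/log.Fatalf inside the handler terminates the whole server process instead of failing the single request. A hedged client-side sketch of the three request shapes; downloadLog is a hypothetical helper, not part of this commit:

// Hypothetical helper showing the three bodies the Go handler distinguishes.
// `api` stands in for environment.api from the Angular client.
async function downloadLog(api: string, kind: "html" | "log" | "json"): Promise<Blob | object> {
  // Per the handler: requestJson === true selects .json; htmlfile === true
  // keeps .html; to reach the .log branch, requestJson must be omitted entirely.
  const body = kind === "log"
    ? { htmlfile: false }
    : { htmlfile: kind === "html", requestJson: kind === "json" };
  const res = await fetch(api + "/dllog", {
    method: "POST",
    credentials: "include", // matches withCredentials: true in the component
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  if (!res.ok) throw new Error("dllog request failed: " + res.status);
  return kind === "json" ? res.json() : res.blob();
}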