fix merge log json bug
parent 6b39392dfb
commit 17ad8486a6
3 changed files with 136 additions and 79 deletions
@@ -46,7 +46,7 @@ export class MergeComponent<T> {
this.targets.changelog_path = this.mergeForm.value.changelog_path!;
// TODO: Fetch merge. Modify this to websocket
let mergeLogs;
this.httpClient.post<T>(environment.api+"/merge", {
master: this.targets.master_version,
dev: this.targets.dev_version,
@@ -60,16 +60,18 @@ export class MergeComponent<T> {
if(typeof value === "object" && value !== null){
if("message" in value){
// fetch html
this.fetchLogsToDisplay("", true);
this.fetchLogsToDisplay("", true, false);
// fetch log file
this.fetchLogsToDisplay("", false);
this.fetchLogsToDisplay("", false, false);
// fetch json
mergeLogs = this.fetchLogsToDisplay("", false, true);
}
}
},
})
}
fetchLogsToDisplay(query: string, isDisplayOnly: boolean){
fetchLogsToDisplay(query: string, isDisplayOnly: boolean, requestJson: boolean){
let additionalParams:string = "?query=";
if(query != ""){
additionalParams += query
@@ -77,31 +79,52 @@ export class MergeComponent<T> {
additionalParams = ""
}
this.httpClient.post(environment.api+"/dllog"+additionalParams, {
htmlfile: isDisplayOnly,
},{
responseType: 'blob',
withCredentials: true,
}).subscribe(
{
let jsontarget;
if(requestJson){
this.httpClient.post(environment.api+"/dllog"+additionalParams, {
htmlfile: isDisplayOnly,
requestJson: requestJson
}, {
responseType: 'json',
withCredentials: true
}).subscribe({
next: (value) => {
const blob = new Blob([value], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = 'logfile.log';
a.innerText = "Click here to download as `.log` file";
if(isDisplayOnly){
blob.text().then(v => document.getElementById("log-disp-texts")!.innerHTML = v);
} else {
document.getElementById("log-dl")?.appendChild(a);
document.getElementById("log-dl")!.className = "bg-yellow-500 rounded p-2 sticky top-0";
}
jsontarget = value;
},
error: (err) => {
console.error('Error downloading log file: ',err);
console.error('Error fetch json: ',err);
}
}
)
});
return jsontarget;
} else {
this.httpClient.post(environment.api+"/dllog"+additionalParams, {
htmlfile: isDisplayOnly,
requestJson: requestJson
},{
responseType: 'blob',
withCredentials: true,
}).subscribe(
{
next: (value) => {
const blob = new Blob([value], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = 'logfile.log';
a.innerText = "Click here to download as `.log` file";
if(isDisplayOnly){
blob.text().then(v => document.getElementById("log-disp-texts")!.innerHTML = v);
} else {
document.getElementById("log-dl")?.appendChild(a);
document.getElementById("log-dl")!.className = "bg-yellow-500 rounded p-2 sticky top-0";
}
},
error: (err) => {
console.error('Error downloading log file: ',err);
}
}
)
}
}
}
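For comparison, here is a minimal promise-based sketch of the JSON request shown above (not part of this commit): `fetchJsonLogs` is a hypothetical helper, and it assumes the same `/dllog` endpoint, `htmlfile`/`requestJson` body flags, and base URL from `environment.api` that the component uses.

import { HttpClient } from '@angular/common/http';
import { firstValueFrom } from 'rxjs';

// Hypothetical helper: resolves with the parsed JSON changelog once the
// POST to /dllog completes, so a caller can await the value rather than
// read a variable assigned inside subscribe().
async function fetchJsonLogs(http: HttpClient, api: string, query: string): Promise<unknown> {
  const additionalParams = query !== "" ? "?query=" + query : "";
  return firstValueFrom(
    http.post(api + "/dllog" + additionalParams,
      { htmlfile: false, requestJson: true },       // ask the server for the .json changelog
      { responseType: 'json', withCredentials: true })
  );
}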
@@ -20,6 +20,7 @@ valid_len_product_code = len("12-01-02-0001")
# events - save any action done by merge
events = []
events_json = []
# last_change - current value of "LastChange"
last_change = ""
# last_change_path - key mapping to "LastChange" key, joined by '.' and could be splitted to access value in nested map
@@ -134,10 +135,8 @@ def merge(args):
print("Master file size => ",os.stat(master_path).st_size)
print("Dev file size => ",os.stat(dev_path).st_size)
#
if isJson:
events.append(create_map(events_action="MERGE", log=devName.split("/")[-1]+" into "+masterName.split("/")[-1]))
else:
events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n")
events_json.append(create_map(events_action="MERGE", log=devName.split("/")[-1]+" into "+masterName.split("/")[-1]))
events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n")
# print(GetDateTimeString()+"\tMerging "+dev_file.name.split("/")[-1]+" into "+master_file.name.split("/")[-1]+"\n")
print(events[len(events) - 1])
@@ -176,28 +175,38 @@ def merge(args):
# Include counts
if isJson:
events.append(create_map(events_action="COUNT", log="Total="+str(pdchange)))
events.append(create_map(events_action="COUNT", log="Total="+str(pdadd)))
events.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path))
events.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
else:
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
events_json.append(create_map(events_action="COUNT", log="Total="+str(pdchange)))
events_json.append(create_map(events_action="COUNT", log="Total="+str(pdadd)))
events_json.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path))
events_json.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
if isJson:
with open(changefile_path, "a+", encoding="utf-8") as outlogfile:
json.dump({"logs": events}, outlogfile, indent=2, ensure_ascii=False)
# log json file
if os.stat(file_path+".json").st_size == 0:
with open(file_path+".json", "w+",encoding="utf-8") as outlogfile:
json.dump({"logs"+GetDateTimeString()+"*": events_json}, outlogfile, indent=2, ensure_ascii=False)
else:
with open(changefile_path, "a+") as outlogfile:
print(file_path+".json")
logjs:dict = json.loads(open(file_path+".json", encoding="utf-8").read())
logjs["logs"+GetDateTimeString()+"*"] = events_json
json.dump(logjs, open(file_path+".json", "w+", encoding="utf-8"), indent=2, ensure_ascii=False)
# log file
with open(file_path+".log", "a+") as outlogfile2:
try:
for event in events:
outlogfile.write(event)
outlogfile2.write(event)
except:
raise Exception(event)
# Create html version
with open(changefile_path[:-3 if not isJson else -4]+"html", "a+") as outlogHtml:
with open(file_path+".html", "a+") as outlogHtml:
for event in events:
# Create div
# print("Log as list: ",str(event).split("\t"))
@@ -273,11 +282,10 @@ def merge_dicts(original:dict, updated:dict, path=""):
except:
pass
if isJson:
events.append(
create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value)))
else:
events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
events_json.append(
create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value)))
events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
else:
lc = last_change if last_change != "" else pre_timestamp
try:
@@ -291,10 +299,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
if "productCode" in original and holdonPD != original["productCode"]:
holdonPD = original["productCode"]
pdchange += 1
if isJson:
events.append(
create_map(
events_json.append(create_map(
events_action="CHANGE",
log="",
additional=[
@@ -308,16 +313,14 @@ def merge_dicts(original:dict, updated:dict, path=""):
}
]
))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\" | \""+str(original["name"]).replace("\n","\\n")+"\"\n")
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\"\n")
# elif "MaterialSetting" in current_path:
# events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n")
# override original value by value from updated(dev)
original[key] = value
else:
if isJson:
events.append(create_map(
events_json.append(create_map(
events_action="INSERT+",
log="NewKeyValue",
additional=[{
@@ -327,8 +330,7 @@ def merge_dicts(original:dict, updated:dict, path=""):
"fullpath": current_path
}]
))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
try:
if debug == "debug":
print("Add path --> "+path, " | master: "," dev: ", str(value))
@@ -375,8 +377,7 @@ def merge_lists(original, updated, path=""):
print("Append dict@ i=",i, " path: ", current_path)
except:
pass
if isJson:
events.append(create_map(
events_json.append(create_map(
events_action="INSERT",
log="AppendDict",
additional=[{
@@ -385,8 +386,7 @@ def merge_lists(original, updated, path=""):
"fullpath": current_path
}]
))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
global pdadd
pdadd += 1
original.append(item)
@@ -396,8 +396,7 @@ def merge_lists(original, updated, path=""):
print("Append list@ i=",i, " path: ", current_path)
except:
pass
if isJson:
events.append(create_map(
events_json.append(create_map(
events_action="INSERT",
log="AppendList",
additional=[{
@@ -406,8 +405,7 @@ def merge_lists(original, updated, path=""):
"fullpath": current_path
}]
))
else:
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
pdadd += 1
original.append(item)
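Judging from the `create_map(events_action=..., log=..., additional=[...])` calls and the `json.dump({"logs"+GetDateTimeString()+"*": events_json}, ...)` writes above, the new JSON changelog groups one array of entries under a timestamped key per merge run. A rough client-side shape, inferred from this diff rather than from a published schema, could be:

// Field names are inferred from the create_map(...) keyword arguments above;
// treat this as an assumption, not a confirmed schema.
interface MergeLogEntry {
  events_action: string;                    // e.g. "MERGE", "COUNT", "CHANGE", "INSERT", "OUTPUT", "LOG"
  log: string;                              // human-readable message
  additional?: Record<string, unknown>[];   // extra detail such as { fullpath: "..." }
}

// One entry array per merge run, keyed by "logs<timestamp>*".
type MergeChangelog = Record<string, MergeLogEntry[]>;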
@@ -176,10 +176,11 @@ func (s *Server) createHandler() {
}
defer merge_api.Close()
log.Println("Locate python api", merge_api.Name())
cmd := exec.Command(py_exec, merge_api.Name(), "merge", master_path, dev_path, output_path, changelog_path, "debug")
cmd := exec.Command(py_exec, merge_api.Name(), "merge", master_path, dev_path, output_path, changelog_path)
log.Println("Run merge command", cmd)
err = cmd.Run()
out, err := cmd.CombinedOutput()
log.Println(string(out))
if err != nil {
log.Fatalln("Merge request failed. Python merge failed: ", err)
}
@@ -190,8 +191,28 @@ func (s *Server) createHandler() {
})
r.Get("/dllog", func(w http.ResponseWriter, r *http.Request) {
changelog_path := "cofffeemachineConfig/changelog/testlog.html"
r.Post("/dllog", func(w http.ResponseWriter, r *http.Request) {
var postRequest map[string]interface{}
err := json.NewDecoder(r.Body).Decode(&postRequest)
if err != nil {
w.WriteHeader(http.StatusBadRequest)
log.Fatalln("Log request failed: ", err)
return
}
file_ext := ".html"
if rb, ok := postRequest["htmlfile"].(bool); ok {
if rj, ok := postRequest["requestJson"].(bool); ok {
if rj {
file_ext = ".json"
}
} else if !rb {
file_ext = ".log"
}
}
changelog_path := "cofffeemachineConfig/changelog/testlog" + file_ext
logFile, err := os.Open(changelog_path)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
@@ -199,14 +220,29 @@ func (s *Server) createHandler() {
defer logFile.Close()
w.Header().Set("Content-Disposition", "attachment; filename=logfile.html")
w.Header().Set("Content-Type", "application/octet-stream")
if file_ext == ".json" {
_, err = io.Copy(w, logFile)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
var logFileJson map[string]interface{}
err = json.NewDecoder(logFile).Decode(&logFileJson)
if err != nil {
log.Fatalf("Error when decode log file: %s", err)
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
json.NewEncoder(w).Encode(logFileJson)
log.Println("Log file: ", changelog_path)
} else {
w.Header().Set("Content-Disposition", "attachment; filename=logfile"+file_ext)
w.Header().Set("Content-Type", "application/octet-stream")
_, err = io.Copy(w, logFile)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
})
// Recipe Router
rr := routers.NewRecipeRouter(database)
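As a usage illustration of the reworked `/dllog` handler (the base URL below is a placeholder; the Angular client takes it from `environment.api`), the three changelog variants can be requested like this:

// With both flags present, as this helper sends them: requestJson=true selects
// testlog.json, otherwise testlog.html; testlog.log is only chosen when the
// body omits requestJson and htmlfile is false.
async function downloadChangelog(htmlfile: boolean, requestJson: boolean): Promise<Response> {
  const api = "http://localhost:8080";      // placeholder base URL
  return fetch(api + "/dllog", {
    method: "POST",
    credentials: "include",                 // the component sends withCredentials: true
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ htmlfile, requestJson }),
  });
}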