Add support for changelog.json
parent 1b439edb77
commit 8221183630

2 changed files with 124 additions and 30 deletions
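
The new changelog format wraps every merge event in a single `{"logs": [...]}` object, each entry carrying the four fields built by the `create_map` helper added below. A minimal sketch of the resulting shape (timestamps and log texts are illustrative, not from a real run):

```python
import json

# Hypothetical two-event changelog in the shape the new merge script writes.
changelog = {
    "logs": [
        {
            "timestamp": "2024-01-01 12:00:00",  # produced by GetDateTimeString()
            "event": "MERGE",
            "log": "dev.json into master.json",
            "addition": []
        },
        {
            "timestamp": "2024-01-01 12:00:01",
            "event": "COUNT",
            "log": "Total=3",
            "addition": []
        }
    ]
}
print(json.dumps(changelog, indent=2, ensure_ascii=False))
```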
@@ -3,7 +3,7 @@ import { CommonModule } from '@angular/common';
 import { FormBuilder, ReactiveFormsModule } from '@angular/forms';
 import { MergeServiceService } from './merge-service.service';
-import { HttpClient } from '@angular/common/http';
+import { HttpClient, HttpParams } from '@angular/common/http';
 import { environment } from 'src/environments/environment.development';
 import { Observable } from 'rxjs';

@@ -59,23 +59,27 @@ export class MergeComponent<T> {
         console.log(value)
         if(typeof value === "object" && value !== null){
           if("message" in value){
             console.log(value.message)
             // alert(value.message + " \n Fetching logs ...")
-            this.downloadLogs("");
+            // fetch html
+            this.fetchLogsToDisplay("", true);
+            // fetch log file
+            this.fetchLogsToDisplay("", false);
           }
         }
       },
     })
   }

-  downloadLogs(query: string){
+  fetchLogsToDisplay(query: string, isDisplayOnly: boolean){
     let additionalParams:string = "?query=";
     if(query != ""){
       additionalParams += query
     } else {
       additionalParams = ""
     }
-    this.httpClient.get(environment.api+"/dllog"+additionalParams, {
+    this.httpClient.post(environment.api+"/dllog"+additionalParams, {
+      htmlfile: isDisplayOnly,
+    },{
       responseType: 'blob',
       withCredentials: true,
     }).subscribe(

@@ -86,15 +90,13 @@ export class MergeComponent<T> {
         const a = document.createElement('a');
         a.href = url;
         a.download = 'logfile.log';
-        a.innerText = "download log";
-        console.log("Blob: ",blob.text())
-        // document.body.appendChild(a);
-        document.getElementById("log-dl")?.appendChild(a);
-        document.getElementById("log-dl")!.className = "bg-yellow-500 rounded p-2";
-        blob.text().then(v => document.getElementById("log-disp-texts")!.innerHTML = v);
-        // a.click();
-
-        // window.URL.revokeObjectURL(url);
+        a.innerText = "Click here to download as `.log` file";
+        if(isDisplayOnly){
+          blob.text().then(v => document.getElementById("log-disp-texts")!.innerHTML = v);
+        } else {
+          document.getElementById("log-dl")?.appendChild(a);
+          document.getElementById("log-dl")!.className = "bg-yellow-500 rounded p-2 sticky top-0";
+        }
       },
       error: (err) => {
         console.error('Error downloading log file: ',err);

@@ -32,6 +32,10 @@ config_ver = -1
 # holdonPD = the current product code that has been holding
 holdonPD = ""

+# output config
+global isJson
+isJson = False
+
 # Set value of nested map (target - master_json) by using keys from 'key_list' and assigned value to it.
 def set_value_in_nested_map(key_list: list, value):
     reduce(getitem, key_list[:-1], master_json)[key_list[-1]] = value

@@ -82,6 +86,14 @@ def fetch_onlyMainMenuPath(str_with_dot: str):
     # '.'.join(mainpath)
     return ".".join(str(p) for p in mainpath)

+def create_map(events_action: str, log: str, additional=[]) -> dict:
+    return {
+        "timestamp": GetDateTimeString(),
+        "event": events_action,
+        "log": log,
+        "addition": additional
+    }
+
 # Merge diff value or append it to the main file.
 # 0 - master_path = master.json / to be merged
 # 1 - dev_path = dev.json / to merge into another

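One caveat on `create_map` worth flagging: `additional=[]` is evaluated once at definition time, so every call that omits the argument shares the same list object. The merge script never mutates it, so this is benign here, but a sketch of the usual `None`-sentinel idiom in case that ever changes:

```python
def create_map(events_action: str, log: str, additional=None) -> dict:
    # None sentinel: each call that omits `additional` gets a fresh list
    # instead of sharing one module-level default.
    if additional is None:
        additional = []
    return {
        "timestamp": GetDateTimeString(),  # helper defined elsewhere in this script
        "event": events_action,
        "log": log,
        "addition": additional
    }
```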
@@ -92,6 +104,14 @@ def merge(args):
     print("ARGS: => ",args)

     master_path = args[0]; dev_path = args[1]; outfile_path = args[2]; changefile_path = args[3];
+
+    file_path, out_ext = os.path.splitext(changefile_path)
+
+    if "json" in out_ext:
+        global isJson
+        isJson = True
+
+
     if len(args) > 4:
         global debug
         debug = args[4] if args[4] != None else False

|
|||
print("Master file size => ",os.stat(master_path).st_size)
|
||||
print("Dev file size => ",os.stat(dev_path).st_size)
|
||||
#
|
||||
events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n")
|
||||
if isJson:
|
||||
events.append(create_map(events_action="MERGE", log=devName.split("/")[-1]+" into "+masterName.split("/")[-1]))
|
||||
else:
|
||||
events.append(GetDateTimeString()+"\t[MERGE]\t\tMerging "+devName.split("/")[-1]+" into "+masterName.split("/")[-1]+"\n")
|
||||
# print(GetDateTimeString()+"\tMerging "+dev_file.name.split("/")[-1]+" into "+master_file.name.split("/")[-1]+"\n")
|
||||
print(events[len(events) - 1])
|
||||
|
||||
|
|
@@ -152,17 +175,29 @@ def merge(args):
             json.dump(master_json, outfile, indent=2, ensure_ascii=False)

     # Include counts
-    events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
-    events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
-
-    events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
-    events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
-    with open(changefile_path, "a+", encoding="utf-8") as outlogfile:
-        for event in events:
-            outlogfile.write(event)
+    if isJson:
+        events.append(create_map(events_action="COUNT", log="Total="+str(pdchange)))
+        events.append(create_map(events_action="COUNT", log="Total="+str(pdadd)))
+        events.append(create_map(events_action="OUTPUT", log="Finished! write output to "+outfile_path))
+        events.append(create_map(events_action="LOG", log="Log is saved to "+changefile_path))
+    else:
+        events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Change: "+str(pdchange)+"\n")
+        events.append(GetDateTimeString()+"\t[COUNT]\t\tTotal Insert: "+str(pdadd)+"\n")
+
+        events.append(GetDateTimeString()+"\t[OUTPUT]\t\tFinished! write output to "+outfile_path+"\n")
+        events.append(GetDateTimeString()+"\t[LOG]\t\tLog is saved to "+changefile_path+"\n")
+
+    if isJson:
+        with open(changefile_path, "a+", encoding="utf-8") as outlogfile:
+            json.dump({"logs": events}, outlogfile, indent=2, ensure_ascii=False)
+    else:
+        with open(changefile_path, "a+") as outlogfile:
+            for event in events:
+                outlogfile.write(event)

     # Create html version
-    with open(changefile_path[:-3]+"html", "a+", encoding="utf-8") as outlogHtml:
+    with open(changefile_path[:-3 if not isJson else -4]+"html", "a+") as outlogHtml:
         for event in events:
             # Create div
             # print("Log as list: ",str(event).split("\t"))

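The HTML twin of the changelog is still derived by slicing a hard-coded number of characters off the path (`-3` for `log`, `-4` for `json`). A splitext-based sketch would compute the same paths without per-extension arithmetic — an alternative, not what the commit ships:

```python
import os

def html_log_path(changefile_path: str) -> str:
    # "changes.log" -> "changes.html", "changes.json" -> "changes.html",
    # matching changefile_path[:-3]+"html" / [:-4]+"html" above.
    root, _ = os.path.splitext(changefile_path)
    return root + ".html"

assert html_log_path("changes.log") == "changes.html"
assert html_log_path("changes.json") == "changes.html"
```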
@@ -237,7 +272,12 @@ def merge_dicts(original:dict, updated:dict, path=""):
                         print("Config number ", config_ver)
                     except:
                         pass
-                    events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
+
+                    if isJson:
+                        events.append(
+                            create_map(events_action="MERGE_TARGETS", log="Found `configNumber` => master version "+str(config_ver)+", merged with "+str(value)))
+                    else:
+                        events.append(GetDateTimeString()+cfgnum+"\tFound `configNumber` => master version "+str(config_ver)+", merged with "+str(value)+" \n")
                 else:
                     lc = last_change if last_change != "" else pre_timestamp
                     try:

@@ -252,13 +292,43 @@ def merge_dicts(original:dict, updated:dict, path=""):
                        holdonPD = original["productCode"]
                    pdchange += 1

-                   events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\" | \""+str(original["name"]).replace("\n","\\n")+"\"\n")
+                   if isJson:
+                       events.append(
+                           create_map(
+                               events_action="CHANGE",
+                               log="",
+                               additional=[
+                                   {
+                                       "version": config_ver,
+                                       "master_last_change": (get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True)),
+                                       "dev_last_change": last_change,
+                                       "pd": holdonPD,
+                                       "name": original["name"],
+                                       "fullpath": current_path
+                                   }
+                               ]
+                           ))
+                   else:
+
+                       events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+" LastChange: "+(get_value_in_nested_map(master_json, decode_path(last_change_path), isMaster=True))+" (master) |\t"+last_change+" (dev)\t"+"---\t \""+holdonPD+"\" | \""+str(original["name"]).replace("\n","\\n")+"\"\n")
                # elif "MaterialSetting" in current_path:
                #     events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"] [CHANGE]\t"+"\t MaterialSetting"+original["productCode"]+" --- "+" \""+str(value)+"\"("+original[key]+" // key:"+key+")\n")
                # override original value by value from updated(dev)
                original[key] = value
            else:
-               events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
+               if isJson:
+                   events.append(create_map(
+                       events_action="INSERT+",
+                       log="NewKeyValue",
+                       additional=[{
+                           "version": config_ver,
+                           "pd": holdonPD,
+                           "new_value": value,
+                           "fullpath": current_path
+                       }]
+                   ))
+               else:
+                   events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew key & value >>> \""+holdonPD+" \""+str(value)+("\t\t\t")+"\n")
                try:
                    if debug == "debug":
                        print("Add path --> "+path, " | master: "," dev: ", str(value))

@@ -305,7 +375,18 @@ def merge_lists(original, updated, path=""):
                    print("Append dict@ i=",i, " path: ", current_path)
                except:
                    pass
-               events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
+               if isJson:
+                   events.append(create_map(
+                       events_action="INSERT",
+                       log="AppendDict",
+                       additional=[{
+                           "version": config_ver,
+                           "pd": fetch_pd(fetch_onlyMainMenuPath(current_path), master_json),
+                           "fullpath": current_path
+                       }]
+                   ))
+               else:
+                   events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
                global pdadd
                pdadd += 1
                original.append(item)

@@ -315,7 +396,18 @@ def merge_lists(original, updated, path=""):
                    print("Append list@ i=",i, " path: ", current_path)
                except:
                    pass
-               events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
+               if isJson:
+                   events.append(create_map(
+                       events_action="INSERT",
+                       log="AppendList",
+                       additional=[{
+                           "version": config_ver,
+                           "pd": fetch_pd(fetch_onlyMainMenuPath(current_path), master_json),
+                           "fullpath": current_path
+                       }]
+                   ))
+               else:
+                   events.append(GetDateTimeString()+"\t[VER."+str(config_ver)+"]\t\t\t[INSERT]\t"+"\tNew value >>> "+fetch_pd(fetch_onlyMainMenuPath(current_path), master_json)+("\t\t\t")+"\n")
                pdadd += 1
                original.append(item)

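On the consuming side, the JSON changelog can be loaded in one call instead of splitting tab-separated lines. Note the script opens the changelog with `"a+"`, so a pre-existing file would accumulate more than one JSON document; this sketch assumes a freshly written file:

```python
import json

with open("changelog.json", encoding="utf-8") as f:
    changelog = json.load(f)

# Both "INSERT" (merge_lists) and "INSERT+" (merge_dicts) events match here.
inserts = [e for e in changelog["logs"] if e["event"].startswith("INSERT")]
for e in inserts:
    print(e["timestamp"], e["log"], e.get("addition", []))
```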