feat: add multi-file support for commit

Signed-off-by: Pakin <pakin.t@forth.co.th>
This commit is contained in:
Pakin 2026-05-06 10:00:06 +07:00
parent ae9d9fa66b
commit 166b7079ca

View file

@ -14,8 +14,8 @@ use bb8::{Pool, PooledConnection};
use bb8_redis::RedisConnectionManager;
use brotli::CompressorWriter;
use git2::{
Cred, FetchOptions, Object, ObjectType, PushOptions, RemoteCallbacks, Repository, ResetType,
build::CheckoutBuilder,
Cred, FetchOptions, Object, ObjectType, Oid, PushOptions, RemoteCallbacks, Repository,
ResetType, build::CheckoutBuilder,
};
use image::load_from_memory;
@ -392,6 +392,10 @@ async fn commit_handler(
let mut message: Option<String> = None;
let mut file_bytes: Option<Vec<u8>> = None;
let mut file_mappings: HashMap<String, Vec<u8>> = HashMap::new();
let mut filex_map: HashMap<String, Vec<u8>> = HashMap::new();
let mut pathx_map: HashMap<String, String> = HashMap::new();
// Process each field in the multipart payload
while let Ok(Some(field)) = payload.next_field().await {
let name = field.name().unwrap_or("").to_string();
@ -456,26 +460,48 @@ async fn commit_handler(
}
});
}
filex if name.starts_with("file") => match field.bytes().await {
Ok(fx) => {
filex_map.insert(filex.to_string(), fx.to_vec());
}
Err(e) => {
warn!("fail to read field multi file: {}", e);
}
},
pathx if name.starts_with("path") => match field.text().await {
Ok(px) => {
pathx_map.insert(pathx.to_string(), px);
}
Err(e) => {
warn!("fail to read field multi path: {}", e);
}
},
_ => {
// Ignore unknown fields
}
}
}
info!("committing ...");
// start do multi file if has any
// Validate required fields
let path = match path {
Some(p) => p,
None => {
return (
axum::http::StatusCode::BAD_REQUEST,
Json(json!({"error": "Missing required field: path"})),
);
if pathx_map.len() == filex_map.len() {
info!("mapping multi files");
for (_, (path_key, path_val)) in pathx_map.iter().enumerate() {
let expected_file_key = format!("file{}", path_key.replace("path", ""));
if filex_map.contains_key(&expected_file_key)
&& let Some(file_bytes) = filex_map.get(&expected_file_key)
{
info!("[multi-files] {}: {}", path_val, file_bytes.len());
file_mappings.insert(path_val.to_owned(), file_bytes.to_owned());
}
}
};
}
info!("path: [{path}]");
info!("committing ...");
let signature_username = match signature_username {
Some(su) => su,
@ -497,16 +523,6 @@ async fn commit_handler(
}
};
let file_bytes = match file_bytes {
Some(fb) => fb,
None => {
return (
axum::http::StatusCode::BAD_REQUEST,
Json(json!({"error": "Missing required field: file"})),
);
}
};
// Create signature
let signature = Signature {
username: signature_username,
@ -522,43 +538,53 @@ async fn commit_handler(
.map(|x| x.to_string())
.unwrap_or("master".to_string());
// Commit the file content directly from multipart upload
let commit_oid = match commit_file_content(
state.clone().repo,
&path,
&file_bytes,
signature,
&message.unwrap_or("update: from api".to_string()),
branch,
)
.await
let commit_oid = if let Some(file_bytes) = file_bytes
&& let Some(path) = path
{
Ok(oid) => oid,
Err(e) => {
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": e.to_string()})),
);
}
};
info!("commit success! [{commit_oid}]");
// save history
let redis_pre_lock = state.redis.clone();
{
if let Ok(mut rl) = redis_pre_lock.get().await {
match rl
.rpush(
format!("{}.history", path),
format!("commit-{}", commit_oid),
)
.await
{
Ok(_) => info!("history saved"),
Err(e) => error!("save history fail: {e:?}"),
match commit_file_content(
state.clone().repo,
&path,
&file_bytes,
signature,
&message.unwrap_or("update: from api".to_string()),
branch,
)
.await
{
Ok(oid) => oid,
Err(e) => {
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": e.to_string()})),
);
}
}
} else if !file_mappings.is_empty() {
match commit_multiple_file_contents(
state.clone().repo,
file_mappings,
signature,
&message.unwrap_or("update: from api".to_string()),
branch,
)
.await
{
Ok(oid) => oid,
Err(e) => {
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": e.to_string()})),
);
}
}
} else {
Oid::zero()
};
if commit_oid.is_zero() {
info!("invalid commit process, return 0");
} else {
info!("commit success! [{commit_oid}]");
}
(
@ -770,6 +796,69 @@ async fn commit_file_content(
Ok(oid)
}
/// Commit several files to `branch` in a single commit.
///
/// Each entry in `file_mapping` maps a repository-relative path to the raw
/// bytes of that file. A blob is written for every file, all blobs are
/// staged into the in-memory index on top of the branch tip's tree (when
/// the branch already exists), and one commit containing every file is
/// created and the branch ref advanced to it.
///
/// Returns the new commit's `Oid`; any git2 failure is propagated as a
/// boxed error.
async fn commit_multiple_file_contents(
    repo: Arc<Mutex<Repository>>,
    file_mapping: HashMap<String, Vec<u8>>,
    author: Signature,
    message: &str,
    branch: String,
) -> Result<git2::Oid, Box<dyn std::error::Error>> {
    let repo_guard = repo.lock().await;
    let mut index = repo_guard.index()?;
    let target_ref = format!("refs/heads/{branch}");
    // Base the new tree on the branch tip if it exists; otherwise this is
    // the first (root) commit and the index starts from whatever state it
    // was in on disk.
    let parent_commit = match repo_guard.find_reference(&target_ref) {
        Ok(reference) => {
            let commit = reference.peel_to_commit()?;
            index.read_tree(&commit.tree()?)?;
            Some(commit)
        }
        Err(_) => None,
    };
    for (path, file) in &file_mapping {
        let blob_oid = repo_guard.blob(file.as_slice())?;
        index.add(&git2::IndexEntry {
            ctime: git2::IndexTime::new(0, 0),
            mtime: git2::IndexTime::new(0, 0),
            dev: 0,
            ino: 0,
            // Regular, non-executable file.
            mode: 0o100644,
            uid: 0,
            gid: 0,
            // NOTE(review): `file_size` is a u32 in git2's index entry, so
            // this truncates for files >= 4 GiB; the blob content itself is
            // stored in full regardless.
            file_size: file.len() as u32,
            id: blob_oid,
            flags: 0,
            flags_extended: 0,
            path: path.as_bytes().to_vec(),
        })?;
    }
    let tree_oid = index.write_tree()?;
    let tree = repo_guard.find_tree(tree_oid)?;
    let sig = git2::Signature::now(&author.username, &author.email)?;
    // Zero parents for a root commit, exactly one otherwise.
    let parents: Vec<_> = parent_commit.iter().collect();
    let oid = repo_guard.commit(Some(&target_ref), &sig, &sig, message, &tree, &parents)?;
    info!("commit oid: {oid}");
    Ok(oid)
}
async fn push_handler(State(state): State<AppState>) -> impl IntoResponse {
let config = state.clone().get_all_configures();
let repo = state.repo.clone();