Signed-off-by: Pakin <pakin.t@forth.co.th>
This commit is contained in:
Pakin 2026-02-27 14:09:21 +07:00
commit 551f4ec3ab
14 changed files with 5227 additions and 0 deletions

1
src/stream/mod.rs Normal file
View file

@ -0,0 +1 @@
pub mod model;

179
src/stream/model.rs Normal file
View file

@ -0,0 +1,179 @@
use rayon::iter::Either;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Conversion of a typed stream message into the JSON envelope sent to clients.
pub trait IntoStreamMessage {
    /// Discriminator written into the envelope's `"type"` field.
    ///
    /// The `'static` lifetime is spelled out explicitly: elided lifetimes in
    /// associated constants are deprecated (future-incompatibility lint
    /// `elided_lifetimes_in_associated_constant`) and will become a hard error.
    const MSG_NAME: &'static str;

    /// Wrap `self` into a `{ "type": MSG_NAME, "payload": ... }` JSON value.
    fn build(&self) -> serde_json::Value;

    /// Stream id this message belongs to (a UUID v4 string for all
    /// implementors in this module).
    fn get_id(&self) -> String;
}
/// Pre-flight metadata announcing a new stream before any chunk is sent.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StreamDataStart {
    /// UUID v4; the client must map all later messages to this stream id.
    pub stream_id: String,
    /// Total number of items that will be streamed (known up front).
    pub total_size: usize,
    /// Maximum number of items per chunk.
    pub chunk_size: usize,
    /// Optional referer tag for tracing/debugging individual streams.
    /// Serialized as `"ref"` and omitted from JSON when `None`.
    #[serde(rename = "ref")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_ref: Option<String>,
}
impl IntoStreamMessage for StreamDataStart {
    // Explicit 'static: elided lifetimes in associated consts are deprecated.
    const MSG_NAME: &'static str = "stream_data_start";

    /// Build the `{ "type", "payload" }` envelope for this start message.
    fn build(&self) -> serde_json::Value {
        // `&Self` serializes through the blanket `impl Serialize for &T`,
        // so the previous `self.clone()` was a redundant allocation.
        serde_json::json!({
            "type": Self::MSG_NAME,
            "payload": self
        })
    }

    fn get_id(&self) -> String {
        self.stream_id.clone()
    }
}
impl StreamDataStart {
pub fn new(total_size: usize, chunk_size: usize, stream_ref: Option<String>) -> Self {
Self {
stream_id: Uuid::new_v4().to_string(),
total_size,
chunk_size,
stream_ref,
}
}
pub fn as_msg(&self) -> serde_json::Value {
self.build()
}
}
/// One chunk of the data, split so that it does not exceed the expected size.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StreamDataChunk<T> {
    /// UUID v4; the client must map this chunk back to the announced stream.
    pub stream_id: String,
    /// Index (within the full data set) of the first item in this chunk.
    pub start_idx: usize,
    /// The chunked data, split into at most N items per chunk.
    pub data: Vec<T>,
}
impl<T> IntoStreamMessage for StreamDataChunk<T>
where
    T: Serialize,
{
    // Explicit 'static: elided lifetimes in associated consts are deprecated.
    const MSG_NAME: &'static str = "stream_data_chunk";

    /// Build the `{ "type", "payload" }` envelope for this chunk.
    fn build(&self) -> serde_json::Value {
        serde_json::json!({
            // `Self::` avoids restating the type (`StreamDataChunk::<T>::`).
            "type": Self::MSG_NAME,
            "payload": self
        })
    }

    fn get_id(&self) -> String {
        self.stream_id.clone()
    }
}
impl<T> StreamDataChunk<T>
where
T: Serialize,
{
pub fn new(sid: &str, start_idx: usize, data: Vec<T>) -> Self {
Self {
stream_id: sid.to_string(),
start_idx,
data,
}
}
pub fn as_msg(&self) -> serde_json::Value {
self.build()
}
}
/// Close message signaling the end of a stream.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StreamDataEnd {
    /// UUID v4 of the stream being closed.
    pub stream_id: String,
}
impl IntoStreamMessage for StreamDataEnd {
    // Explicit 'static: elided lifetimes in associated consts are deprecated.
    const MSG_NAME: &'static str = "stream_data_end";

    /// Build the `{ "type", "payload" }` envelope for this close message.
    fn build(&self) -> serde_json::Value {
        // `&Self` serializes through the blanket `impl Serialize for &T`,
        // so the previous `self.clone()` was a redundant allocation.
        serde_json::json!({
            "type": Self::MSG_NAME,
            "payload": self
        })
    }

    fn get_id(&self) -> String {
        self.stream_id.clone()
    }
}
impl StreamDataEnd {
pub fn new(sid: &str) -> Self {
Self {
stream_id: sid.to_string(),
}
}
pub fn as_msg(&self) -> serde_json::Value {
self.build()
}
}
/// Extra data sent alongside the previously sent chunks;
/// must be sent before the end message.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StreamDataExtra<T> {
    /// UUID v4; the client must map this message back to the announced stream.
    /// NOTE(review): named `exid` here but treated as the stream id by
    /// `get_id` — presumably the same value as `stream_id`; confirm with callers.
    pub exid: String,
    /// Extra-payload type tag — assumed to discriminate the payload kind
    /// on the client; TODO confirm the expected values.
    pub extp: String,
    /// The extra items themselves.
    pub payload: Vec<T>,
}
impl<T> IntoStreamMessage for StreamDataExtra<T>
where
    T: Serialize,
{
    // Explicit 'static: elided lifetimes in associated consts are deprecated.
    const MSG_NAME: &'static str = "stream_data_extra";

    /// Build the `{ "type", "payload" }` envelope for this extra-data message.
    fn build(&self) -> serde_json::Value {
        serde_json::json!({
            // `Self::` avoids restating the type (`StreamDataExtra::<T>::`).
            "type": Self::MSG_NAME,
            "payload": self
        })
    }

    fn get_id(&self) -> String {
        self.exid.clone()
    }
}
impl<T> StreamDataExtra<T>
where
    // `Clone` bound dropped: `new` no longer copies the payload (see below),
    // so requiring it only restricted callers. Relaxing a bound is
    // backward-compatible.
    T: Serialize,
{
    /// Create an extra-data message bound to an existing stream id.
    ///
    /// `data` arrives by value, so it is moved straight into `payload`;
    /// the previous `data.to_vec()` deep-cloned an already-owned `Vec`.
    pub fn new(exid: &str, extp: &str, data: Vec<T>) -> Self {
        Self {
            exid: exid.to_string(),
            extp: extp.to_string(),
            payload: data,
        }
    }

    /// Convenience alias for [`IntoStreamMessage::build`].
    pub fn as_msg(&self) -> serde_json::Value {
        self.build()
    }
}