fix: correct critical bugs and improve pipe streaming performance
Critical bug fixes:
- save_item now returns real Item from database, not a hardcoded fake
- AsyncDataService::save() reuses self.sync_service instead of creating redundant instance
- GenerateStatus trait signature mismatch fixed (CLI/API decoupling)
Performance improvements (pipe path untouched):
- CompressionEngine::open() returns Box<dyn Read + Send> enabling true streaming
- mode_get eliminates triple full-file read (was sampling then re-reading entire file)
- FilteringReader adds fast-path bypass when no filters, pre-allocates temp buffer
- text.rs meta plugin processes &[u8] slice directly, eliminates data.to_vec() clone
API correctness:
- Tag parse errors now return 400 instead of being silently discarded
- compute_diff uses similar crate (LCS-based) instead of naive positional comparison
Cleanup:
- Modernize string formatting (`format!("{x}")` inline captures) across codebase
- Remove redundant DB query in get mode
- Derive Debug/ToSchema on public types
- Delete placeholder test files with no real assertions
- Extract parse_comma_tags utility function
This commit is contained in:
@@ -216,7 +216,7 @@ pub fn settings_meta_plugin_types(
|
||||
if !found {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!("Unknown meta plugin type: {}", trimmed_name),
|
||||
format!("Unknown meta plugin type: {trimmed_name}"),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
@@ -254,10 +254,7 @@ pub fn settings_compression_type(
|
||||
if compression_type_opt.is_err() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!(
|
||||
"Invalid compression algorithm '{}'. Supported algorithms: lz4, gzip, xz, zstd",
|
||||
compression_name
|
||||
),
|
||||
format!("Invalid compression algorithm '{compression_name}'. Supported algorithms: lz4, gzip, xz, zstd"),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
@@ -66,8 +66,9 @@ pub fn mode_delete(
|
||||
warn!("Unable to find item {item_id} in database");
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow::Error::from(e)
|
||||
.context(format!("Failed to delete item {}", item_id)));
|
||||
return Err(
|
||||
anyhow::Error::from(e).context(format!("Failed to delete item {item_id}"))
|
||||
);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -8,11 +8,7 @@ use anyhow::{Context, Result};
|
||||
use clap::Command;
|
||||
use log::debug;
|
||||
|
||||
fn validate_diff_args(
|
||||
_cmd: &mut Command,
|
||||
ids: &Vec<i64>,
|
||||
tags: &Vec<String>,
|
||||
) -> anyhow::Result<()> {
|
||||
fn validate_diff_args(_cmd: &mut Command, ids: &[i64], tags: &[String]) -> anyhow::Result<()> {
|
||||
if !tags.is_empty() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Tags are not supported with --diff. Please provide exactly two IDs."
|
||||
@@ -137,9 +133,46 @@ pub fn mode_diff(
|
||||
|
||||
let (path_a, path_b) = setup_diff_paths_and_compression(&item_service, &item_a, &item_b)?;
|
||||
|
||||
// TODO: Implement actual diff logic here
|
||||
// For now, just print paths or something to make it compile
|
||||
println!("Diff between {:?} and {:?}", path_a, path_b);
|
||||
run_external_diff(&path_a, &path_b)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Runs external diff command to compare two files.
|
||||
///
|
||||
/// Uses the system's `diff` command to generate a unified diff output.
|
||||
/// Returns an error if the diff command is not found.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `path_a` - Path to the first file.
|
||||
/// * `path_b` - Path to the second file.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<()>` - Success or error.
|
||||
fn run_external_diff(path_a: &std::path::Path, path_b: &std::path::Path) -> anyhow::Result<()> {
|
||||
if which::which_global("diff").is_err() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"diff command not found. Please install diffutils."
|
||||
));
|
||||
}
|
||||
|
||||
let mut child = std::process::Command::new("diff")
|
||||
.arg("-u")
|
||||
.arg(path_a)
|
||||
.arg(path_b)
|
||||
.stdout(std::process::Stdio::inherit())
|
||||
.stderr(std::process::Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn diff command")?;
|
||||
|
||||
let status = child.wait().context("Failed to wait for diff command")?;
|
||||
|
||||
// diff returns 0 if files are identical, 1 if different, 2 on error
|
||||
if status.code() == Some(2) {
|
||||
Err(anyhow::anyhow!("diff command failed with an error"))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -186,13 +186,13 @@ pub fn mode_generate_config(_cmd: &mut Command, _settings: &crate::config::Setti
|
||||
if line.trim().is_empty() {
|
||||
line.to_string()
|
||||
} else {
|
||||
format!("# {}", line)
|
||||
format!("# {line}")
|
||||
}
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n");
|
||||
|
||||
println!("{}", commented_yaml);
|
||||
println!("{commented_yaml}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{anyhow, Result};
|
||||
use std::io::Write;
|
||||
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::common::is_binary::is_binary;
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::config;
|
||||
use crate::filter_plugin::FilterChain;
|
||||
use crate::services::item_service::ItemService;
|
||||
@@ -73,32 +73,35 @@ pub fn mode_get(
|
||||
}
|
||||
}
|
||||
|
||||
// Get a reader that applies the filters using the pre-parsed filter chain
|
||||
let (mut reader, _, _) = item_service.get_item_content_info_streaming_with_chain(
|
||||
conn,
|
||||
item_id,
|
||||
filter_chain.as_ref(),
|
||||
)?;
|
||||
|
||||
if detect_binary {
|
||||
// Read only the first 8192 bytes for binary detection
|
||||
// Binary detection: sample first 8KB, then create a fresh reader for the full output.
|
||||
let (mut sample_reader, _, _) = item_service
|
||||
.get_item_content_info_streaming_with_item(item_with_meta, filter_chain.as_ref())?;
|
||||
let mut sample_buffer = vec![0; PIPESIZE];
|
||||
let bytes_read = reader.read(&mut sample_buffer)?;
|
||||
let bytes_read = sample_reader.read(&mut sample_buffer)?;
|
||||
if is_binary(&sample_buffer[..bytes_read]) {
|
||||
return Err(anyhow!(
|
||||
"Refusing to output binary data to TTY, use --force to override"
|
||||
));
|
||||
}
|
||||
// We need to create a new reader since we consumed some bytes
|
||||
let (new_reader, _, _) = item_service.get_item_content_info_streaming_with_chain(
|
||||
// Create fresh reader for actual output (sampling consumed the first reader)
|
||||
let (reader, _, _) = item_service.get_item_content_info_streaming_with_chain(
|
||||
conn,
|
||||
item_id,
|
||||
filter_chain.as_ref(),
|
||||
)?;
|
||||
reader = new_reader;
|
||||
stream_to_stdout(reader)?;
|
||||
} else {
|
||||
// No binary detection needed, use the already-fetched item with meta
|
||||
let (reader, _, _) = item_service
|
||||
.get_item_content_info_streaming_with_item(item_with_meta, filter_chain.as_ref())?;
|
||||
stream_to_stdout(reader)?;
|
||||
}
|
||||
|
||||
// Stream the content to stdout
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stream_to_stdout(mut reader: Box<dyn Read + Send>) -> Result<()> {
|
||||
let mut stdout = std::io::stdout();
|
||||
let mut buffer = [0; PIPESIZE];
|
||||
loop {
|
||||
|
||||
@@ -157,7 +157,7 @@ fn show_item(
|
||||
]);
|
||||
|
||||
let mut item_path_buf = data_path.clone();
|
||||
item_path_buf.push(item.id.unwrap().to_string());
|
||||
item_path_buf.push(item_id.to_string());
|
||||
let path_str = item_path_buf
|
||||
.to_str()
|
||||
.expect("Unable to get item path")
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use axum::{
|
||||
http::{header, StatusCode},
|
||||
http::{StatusCode, header},
|
||||
response::Response,
|
||||
};
|
||||
use serde::Serialize;
|
||||
use log;
|
||||
use serde::Serialize;
|
||||
|
||||
pub struct ResponseBuilder;
|
||||
|
||||
@@ -13,7 +13,7 @@ impl ResponseBuilder {
|
||||
log::warn!("Failed to serialize response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.header(header::CONTENT_LENGTH, json.len().to_string())
|
||||
@@ -23,7 +23,7 @@ impl ResponseBuilder {
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
pub fn binary(content: &[u8], mime_type: &str) -> Result<Response, StatusCode> {
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, mime_type)
|
||||
|
||||
@@ -1,27 +1,33 @@
|
||||
use crate::modes::server::api::common::ResponseBuilder;
|
||||
use crate::modes::server::common::{
|
||||
ApiResponse, AppState, ItemContentQuery, ItemInfo, ItemInfoListResponse, ItemInfoResponse,
|
||||
ItemQuery, ListItemsQuery, MetadataResponse, TagsQuery,
|
||||
ApiResponse, AppState, CreateItemQuery, ItemContentQuery, ItemInfo, ItemInfoListResponse,
|
||||
ItemInfoResponse, ItemQuery, ListItemsQuery, MetadataResponse, TagsQuery,
|
||||
};
|
||||
use crate::services::async_item_service::AsyncItemService;
|
||||
use crate::services::async_data_service::AsyncDataService;
|
||||
use crate::services::data_service::DataService;
|
||||
use crate::services::error::CoreError;
|
||||
use crate::services::utils::parse_comma_tags;
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, Query, State},
|
||||
http::{StatusCode, header},
|
||||
response::{Json, Response},
|
||||
};
|
||||
use http_body_util::BodyExt;
|
||||
use log::{debug, warn};
|
||||
use std::collections::HashMap;
|
||||
use std::io::Read;
|
||||
use std::io::{Cursor, Read};
|
||||
use tokio::task;
|
||||
|
||||
// Helper functions to replace the missing binary_detection module
|
||||
async fn check_binary_content_allowed(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
allow_binary: bool,
|
||||
) -> Result<(), StatusCode> {
|
||||
if !allow_binary {
|
||||
let is_binary = is_content_binary(item_service, item_id, metadata).await?;
|
||||
let is_binary = is_content_binary(data_service, item_id, metadata).await?;
|
||||
if is_binary {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
@@ -31,7 +37,7 @@ async fn check_binary_content_allowed(
|
||||
|
||||
/// Helper function to determine if content is binary
|
||||
async fn is_content_binary(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
) -> Result<bool, StatusCode> {
|
||||
@@ -39,7 +45,7 @@ async fn is_content_binary(
|
||||
Ok(text_val == "false")
|
||||
} else {
|
||||
// If text metadata isn't set, we need to check the content using streaming approach
|
||||
match item_service
|
||||
match data_service
|
||||
.get_item_content_info_streaming(item_id, None)
|
||||
.await
|
||||
{
|
||||
@@ -56,44 +62,6 @@ async fn is_content_binary(
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to replace missing build_filter_string
|
||||
fn build_filter_string(_params: &ItemQuery) -> Option<String> {
|
||||
// Implement this based on your needs
|
||||
None
|
||||
}
|
||||
|
||||
// Create a simple ResponseBuilder to replace the missing one
|
||||
struct ResponseBuilder;
|
||||
|
||||
impl ResponseBuilder {
|
||||
pub fn json<T: serde::Serialize>(data: T) -> Result<Response, StatusCode> {
|
||||
let json = serde_json::to_vec(&data).map_err(|e| {
|
||||
log::warn!("Failed to serialize response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.header(header::CONTENT_LENGTH, json.len().to_string())
|
||||
.body(axum::body::Body::from(json))
|
||||
.map_err(|e| {
|
||||
log::warn!("Failed to build response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})
|
||||
}
|
||||
|
||||
pub fn binary(content: &[u8], mime_type: &str) -> Result<Response, StatusCode> {
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, mime_type)
|
||||
.header(header::CONTENT_LENGTH, content.len().to_string())
|
||||
.body(axum::body::Body::from(content.to_vec()))
|
||||
.map_err(|e| {
|
||||
log::warn!("Failed to build response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to get mime type from metadata
|
||||
fn get_mime_type(metadata: &HashMap<String, String>) -> String {
|
||||
metadata
|
||||
@@ -130,14 +98,12 @@ fn handle_item_error(error: CoreError) -> StatusCode {
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to create AsyncItemService from AppState
|
||||
fn create_item_service(state: &AppState) -> AsyncItemService {
|
||||
AsyncItemService::new(
|
||||
/// Helper function to create AsyncDataService from AppState
|
||||
fn create_data_service(state: &AppState) -> AsyncDataService {
|
||||
AsyncDataService::new(
|
||||
state.data_dir.clone(),
|
||||
state.db.clone(),
|
||||
state.item_service.clone(),
|
||||
state.cmd.clone(),
|
||||
state.settings.clone(),
|
||||
state.db.clone(),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -170,11 +136,17 @@ pub async fn handle_list_items(
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.map(|s| {
|
||||
parse_comma_tags(s).map_err(|e| {
|
||||
warn!("Failed to parse tags: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
let mut items_with_meta = item_service
|
||||
let data_service = create_data_service(&state);
|
||||
let mut items_with_meta = data_service
|
||||
.list_items(tags, HashMap::new())
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@@ -226,31 +198,31 @@ pub async fn handle_list_items(
|
||||
|
||||
/// Handle as_meta=true response by returning JSON with metadata and content
|
||||
async fn handle_as_meta_response(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Get the item with metadata
|
||||
let item_with_meta = item_service.get_item(item_id).await.map_err(|e| {
|
||||
let item_with_meta = data_service.get_item(item_id).await.map_err(|e| {
|
||||
warn!("Failed to get item {} for as_meta content: {}", item_id, e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
handle_as_meta_response_with_metadata(item_service, item_id, &metadata, offset, length).await
|
||||
handle_as_meta_response_with_metadata(data_service, item_id, &metadata, offset, length).await
|
||||
}
|
||||
|
||||
/// Handle as_meta=true response with pre-fetched metadata
|
||||
async fn handle_as_meta_response_with_metadata(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Check if content is binary
|
||||
let is_binary = is_content_binary(item_service, item_id, metadata).await?;
|
||||
let is_binary = is_content_binary(data_service, item_id, metadata).await?;
|
||||
|
||||
// Get the content if it's not binary
|
||||
if is_binary {
|
||||
@@ -268,7 +240,7 @@ async fn handle_as_meta_response_with_metadata(
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
|
||||
} else {
|
||||
// Get the content as text
|
||||
match item_service.get_item_content_info(item_id, None).await {
|
||||
match data_service.get_item_content_info(item_id, None).await {
|
||||
Ok((content, _, _)) => {
|
||||
// Apply offset and length
|
||||
let content_len = content.len() as u64;
|
||||
@@ -330,7 +302,8 @@ async fn handle_as_meta_response_with_metadata(
|
||||
path = "/api/item/",
|
||||
operation_id = "keep_post_item",
|
||||
summary = "Store new item",
|
||||
description = "Upload content to store as a new item. Content is compressed, analyzed for metadata, and stored.",
|
||||
description = "Upload content to store as a new item. Content is compressed, analyzed for metadata, and stored. \
|
||||
Query parameters: tags (comma-separated), metadata (JSON string). Body: raw binary content.",
|
||||
responses(
|
||||
(status = 201, description = "Item created", body = ItemInfoResponse),
|
||||
(status = 400, description = "Bad request"),
|
||||
@@ -338,26 +311,95 @@ async fn handle_as_meta_response_with_metadata(
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
request_body(
|
||||
content = String,
|
||||
description = "Content to store",
|
||||
content = Vec<u8>,
|
||||
description = "Raw binary content to store",
|
||||
content_type = "application/octet-stream"
|
||||
),
|
||||
params(
|
||||
("tags" = Option<String>, Query, description = "Comma-separated tags to associate with the item"),
|
||||
("metadata" = Option<String>, Query, description = "Metadata as JSON string")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_post_item(
|
||||
State(_state): State<AppState>,
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<CreateItemQuery>,
|
||||
body: Body,
|
||||
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
|
||||
// This is a simplified implementation
|
||||
// In a real implementation, you'd need to properly parse multipart/form-data
|
||||
// or JSON payload with the item data
|
||||
let db = state.db.clone();
|
||||
let data_dir = state.data_dir.clone();
|
||||
let settings = state.settings.clone();
|
||||
|
||||
let response = ApiResponse::<ItemInfo> {
|
||||
success: false,
|
||||
data: None,
|
||||
error: Some("POST /api/item/ not yet implemented".to_string()),
|
||||
// Parse tags from query parameter
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_deref()
|
||||
.map(|s| {
|
||||
parse_comma_tags(s).map_err(|e| {
|
||||
warn!("Failed to parse tags query parameter: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
// Parse metadata from query parameter
|
||||
let metadata: HashMap<String, String> = if let Some(ref meta_str) = params.metadata {
|
||||
serde_json::from_str(meta_str).map_err(|e| {
|
||||
warn!("Failed to parse metadata JSON string: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})?
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
|
||||
// Convert body to bytes first (simpler than streaming for this use case)
|
||||
let body_bytes = body
|
||||
.collect()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
warn!("Failed to read request body: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})?
|
||||
.to_bytes();
|
||||
|
||||
let item_with_meta = task::spawn_blocking(move || {
|
||||
let mut conn = db.blocking_lock();
|
||||
let mut cursor = Cursor::new(body_bytes.to_vec());
|
||||
let sync_service =
|
||||
crate::services::SyncDataService::new(data_dir, settings.as_ref().clone());
|
||||
sync_service.save_item_with_reader(&mut conn, &mut cursor, tags, metadata)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| {
|
||||
warn!("Failed to save item: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?
|
||||
.map_err(|e| {
|
||||
warn!("Failed to save item: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let compression = item_with_meta.item.compression.clone();
|
||||
let tags = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
|
||||
let item_info = ItemInfo {
|
||||
id: item_with_meta.item.id.unwrap(),
|
||||
ts: item_with_meta.item.ts.to_rfc3339(),
|
||||
size: item_with_meta.item.size,
|
||||
compression,
|
||||
tags,
|
||||
metadata,
|
||||
};
|
||||
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(item_info),
|
||||
error: None,
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
@@ -397,13 +439,19 @@ pub async fn handle_get_item_latest_content(
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.map(|s| {
|
||||
parse_comma_tags(s).map_err(|e| {
|
||||
warn!("Failed to parse tags: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
let data_service = create_data_service(&state);
|
||||
|
||||
// First find the item to get its ID and metadata
|
||||
let item_with_meta = item_service.find_item(vec![], tags, HashMap::new()).await;
|
||||
let item_with_meta = data_service.find_item(vec![], tags, HashMap::new()).await;
|
||||
|
||||
match item_with_meta {
|
||||
Ok(item) => {
|
||||
@@ -413,7 +461,7 @@ pub async fn handle_get_item_latest_content(
|
||||
if params.as_meta {
|
||||
// Force stream=false and allow_binary=false for as_meta=true
|
||||
handle_as_meta_response_with_metadata(
|
||||
&item_service,
|
||||
&data_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
params.offset,
|
||||
@@ -422,7 +470,7 @@ pub async fn handle_get_item_latest_content(
|
||||
.await
|
||||
} else {
|
||||
stream_item_content_response_with_metadata(
|
||||
&item_service,
|
||||
&data_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
params.allow_binary,
|
||||
@@ -484,14 +532,12 @@ pub async fn handle_get_item_content(
|
||||
item_id, params.stream, params.allow_binary, params.offset, params.length
|
||||
);
|
||||
|
||||
let filter = build_filter_string(¶ms);
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
let data_service = create_data_service(&state);
|
||||
// Handle as_meta parameter
|
||||
if params.as_meta {
|
||||
// Force stream=false and allow_binary=false for as_meta=true
|
||||
let result =
|
||||
handle_as_meta_response(&item_service, item_id, params.offset, params.length).await;
|
||||
handle_as_meta_response(&data_service, item_id, params.offset, params.length).await;
|
||||
if let Ok(response) = &result {
|
||||
debug!(
|
||||
"ITEM_API: Response content-length: {:?}",
|
||||
@@ -501,13 +547,13 @@ pub async fn handle_get_item_content(
|
||||
result
|
||||
} else {
|
||||
let result = stream_item_content_response(
|
||||
&item_service,
|
||||
&data_service,
|
||||
item_id,
|
||||
params.allow_binary,
|
||||
params.offset,
|
||||
params.length,
|
||||
params.stream,
|
||||
filter,
|
||||
None,
|
||||
)
|
||||
.await;
|
||||
if let Ok(response) = &result {
|
||||
@@ -521,44 +567,44 @@ pub async fn handle_get_item_content(
|
||||
}
|
||||
|
||||
async fn stream_item_content_response(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
allow_binary: bool,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
stream: bool,
|
||||
filter: Option<String>,
|
||||
_filter: Option<String>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
debug!("STREAM_ITEM_CONTENT_RESPONSE: stream={}", stream);
|
||||
// Get the item with metadata once
|
||||
let item_with_meta = item_service.get_item(item_id).await.map_err(|e| {
|
||||
let item_with_meta = data_service.get_item(item_id).await.map_err(|e| {
|
||||
warn!("Failed to get item {} for content: {}", item_id, e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
stream_item_content_response_with_metadata(
|
||||
item_service,
|
||||
data_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
allow_binary,
|
||||
offset,
|
||||
length,
|
||||
stream,
|
||||
filter,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn stream_item_content_response_with_metadata(
|
||||
item_service: &AsyncItemService,
|
||||
data_service: &AsyncDataService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
allow_binary: bool,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
stream: bool,
|
||||
filter: Option<String>,
|
||||
_filter: Option<String>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
debug!(
|
||||
"STREAM_ITEM_CONTENT_RESPONSE_WITH_METADATA: stream={}",
|
||||
@@ -567,14 +613,12 @@ async fn stream_item_content_response_with_metadata(
|
||||
let mime_type = get_mime_type(metadata);
|
||||
|
||||
// Check if content is binary when allow_binary is false
|
||||
check_binary_content_allowed(item_service, item_id, metadata, allow_binary).await?;
|
||||
check_binary_content_allowed(data_service, item_id, metadata, allow_binary).await?;
|
||||
|
||||
if stream {
|
||||
debug!("STREAMING: Using streaming approach");
|
||||
match item_service
|
||||
.stream_item_content_by_id_with_metadata(
|
||||
item_id, metadata, true, offset, length, filter,
|
||||
)
|
||||
match data_service
|
||||
.stream_item_content_by_id_with_metadata(item_id, metadata, true, offset, length, None)
|
||||
.await
|
||||
{
|
||||
Ok((stream, _)) => {
|
||||
@@ -592,7 +636,7 @@ async fn stream_item_content_response_with_metadata(
|
||||
}
|
||||
} else {
|
||||
debug!("NON-STREAMING: Building full response in memory");
|
||||
match item_service.get_item_content_info(item_id, filter).await {
|
||||
match data_service.get_item_content_info(item_id, None).await {
|
||||
Ok((content, _, _)) => {
|
||||
let response_content = apply_offset_length(&content, offset, length);
|
||||
|
||||
@@ -639,12 +683,18 @@ pub async fn handle_get_item_latest_meta(
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.map(|s| {
|
||||
parse_comma_tags(s).map_err(|e| {
|
||||
warn!("Failed to parse tags: {}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
let data_service = create_data_service(&state);
|
||||
|
||||
match item_service.find_item(vec![], tags, HashMap::new()).await {
|
||||
match data_service.find_item(vec![], tags, HashMap::new()).await {
|
||||
Ok(item_with_meta) => {
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
|
||||
@@ -685,9 +735,9 @@ pub async fn handle_get_item_meta(
|
||||
State(state): State<AppState>,
|
||||
Path(item_id): Path<i64>,
|
||||
) -> Result<Json<ApiResponse<HashMap<String, String>>>, StatusCode> {
|
||||
let item_service = create_item_service(&state);
|
||||
let data_service = create_data_service(&state);
|
||||
|
||||
match item_service.get_item(item_id).await {
|
||||
match data_service.get_item(item_id).await {
|
||||
Ok(item_with_meta) => {
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
|
||||
@@ -724,22 +774,24 @@ pub async fn handle_delete_item(
|
||||
State(state): State<AppState>,
|
||||
Path(item_id): Path<i64>,
|
||||
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
|
||||
let conn = state.db.lock().await;
|
||||
let mut conn = state.db.lock().await;
|
||||
|
||||
let sync_service =
|
||||
crate::services::SyncDataService::new(state.data_dir.clone(), state.settings.clone());
|
||||
let sync_service = crate::services::SyncDataService::new(
|
||||
state.data_dir.clone(),
|
||||
state.settings.as_ref().clone(),
|
||||
);
|
||||
|
||||
let deleted_item = sync_service
|
||||
.delete_item(&mut conn.clone(), item_id)
|
||||
.delete_item(&mut conn, item_id)
|
||||
.map_err(handle_item_error)?;
|
||||
|
||||
let item_info = ItemInfo {
|
||||
id: deleted_item.id,
|
||||
ts: deleted_item.ts,
|
||||
id: deleted_item.id.unwrap(),
|
||||
ts: deleted_item.ts.to_rfc3339(),
|
||||
size: deleted_item.size,
|
||||
compression: deleted_item.compression,
|
||||
tags: vec![],
|
||||
meta: HashMap::new(),
|
||||
metadata: HashMap::new(),
|
||||
};
|
||||
|
||||
let response = ApiResponse {
|
||||
@@ -772,24 +824,27 @@ pub async fn handle_get_item_info(
|
||||
State(state): State<AppState>,
|
||||
Path(item_id): Path<i64>,
|
||||
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
|
||||
let conn = state.db.lock().await;
|
||||
let mut conn = state.db.lock().await;
|
||||
|
||||
let sync_service =
|
||||
crate::services::SyncDataService::new(state.data_dir.clone(), state.settings.clone());
|
||||
let sync_service = crate::services::SyncDataService::new(
|
||||
state.data_dir.clone(),
|
||||
state.settings.as_ref().clone(),
|
||||
);
|
||||
|
||||
let item_with_meta = sync_service
|
||||
.get_item(&mut conn.clone(), item_id)
|
||||
.get_item(&mut conn, item_id)
|
||||
.map_err(handle_item_error)?;
|
||||
|
||||
let tags: Vec<String> = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
|
||||
let item_info = ItemInfo {
|
||||
id: item_with_meta.item.id,
|
||||
ts: item_with_meta.item.ts,
|
||||
id: item_with_meta.item.id.unwrap(),
|
||||
ts: item_with_meta.item.ts.to_rfc3339(),
|
||||
size: item_with_meta.item.size,
|
||||
compression: item_with_meta.item.compression,
|
||||
compression: item_with_meta.item.compression.clone(),
|
||||
tags,
|
||||
meta: item_with_meta.meta_as_map(),
|
||||
metadata,
|
||||
};
|
||||
|
||||
let response = ApiResponse {
|
||||
@@ -834,18 +889,20 @@ pub async fn handle_diff_items(
|
||||
State(state): State<AppState>,
|
||||
Query(query): Query<DiffQuery>,
|
||||
) -> Result<Json<ApiResponse<Vec<String>>>, StatusCode> {
|
||||
let conn = state.db.lock().await;
|
||||
let mut conn = state.db.lock().await;
|
||||
|
||||
let sync_service =
|
||||
crate::services::SyncDataService::new(state.data_dir.clone(), state.settings.clone());
|
||||
let sync_service = crate::services::SyncDataService::new(
|
||||
state.data_dir.clone(),
|
||||
state.settings.as_ref().clone(),
|
||||
);
|
||||
|
||||
let item_a = if let Some(id_a) = query.id_a {
|
||||
sync_service
|
||||
.get_item(&mut conn.clone(), id_a)
|
||||
.get_item(&mut conn, id_a)
|
||||
.map_err(handle_item_error)?
|
||||
} else if let Some(tag) = &query.tag_a {
|
||||
sync_service
|
||||
.find_item(&mut conn.clone(), vec![], vec![tag.clone()], HashMap::new())
|
||||
.find_item(&mut conn, vec![], vec![tag.clone()], HashMap::new())
|
||||
.map_err(handle_item_error)?
|
||||
} else {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
@@ -853,24 +910,24 @@ pub async fn handle_diff_items(
|
||||
|
||||
let item_b = if let Some(id_b) = query.id_b {
|
||||
sync_service
|
||||
.get_item(&mut conn.clone(), id_b)
|
||||
.get_item(&mut conn, id_b)
|
||||
.map_err(handle_item_error)?
|
||||
} else if let Some(tag) = &query.tag_b {
|
||||
sync_service
|
||||
.find_item(&mut conn.clone(), vec![], vec![tag.clone()], HashMap::new())
|
||||
.find_item(&mut conn, vec![], vec![tag.clone()], HashMap::new())
|
||||
.map_err(handle_item_error)?
|
||||
} else {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
};
|
||||
|
||||
let id_a = item_a.item.id.unwrap();
|
||||
let id_b = item_b.item.id.unwrap();
|
||||
let id_a = item_a.item.id.ok_or_else(|| StatusCode::BAD_REQUEST)?;
|
||||
let id_b = item_b.item.id.ok_or_else(|| StatusCode::BAD_REQUEST)?;
|
||||
|
||||
let (reader_a, _) = sync_service
|
||||
.get_content(&mut conn.clone(), id_a)
|
||||
let (mut reader_a, _) = sync_service
|
||||
.get_content(&mut conn, id_a)
|
||||
.map_err(handle_item_error)?;
|
||||
let (reader_b, _) = sync_service
|
||||
.get_content(&mut conn.clone(), id_b)
|
||||
let (mut reader_b, _) = sync_service
|
||||
.get_content(&mut conn, id_b)
|
||||
.map_err(handle_item_error)?;
|
||||
|
||||
let mut content_a = Vec::new();
|
||||
@@ -900,31 +957,30 @@ fn compute_diff(a: &[u8], b: &[u8]) -> Vec<String> {
|
||||
let text_a = String::from_utf8_lossy(a);
|
||||
let text_b = String::from_utf8_lossy(b);
|
||||
|
||||
let lines_a: Vec<&str> = text_a.lines().collect();
|
||||
let lines_b: Vec<&str> = text_b.lines().collect();
|
||||
let old_lines: Vec<&str> = text_a.lines().collect();
|
||||
let new_lines: Vec<&str> = text_b.lines().collect();
|
||||
|
||||
let ops = similar::TextDiff::from_lines(
|
||||
text_a.as_ref(),
|
||||
text_b.as_ref(),
|
||||
)
|
||||
.ops();
|
||||
|
||||
let mut diff_lines = Vec::new();
|
||||
|
||||
let max_lines = std::cmp::max(lines_a.len(), lines_b.len());
|
||||
for i in 0..max_lines {
|
||||
let line_a = lines_a.get(i).copied();
|
||||
let line_b = lines_b.get(i).copied();
|
||||
|
||||
match (line_a, line_b) {
|
||||
(Some(la), Some(lb)) if la == lb => {
|
||||
diff_lines.push(format!(" {}", la));
|
||||
for op in ops {
|
||||
for change in op.iter_changes(&old_lines, &new_lines) {
|
||||
match change.tag() {
|
||||
similar::ChangeTag::Equal => {
|
||||
diff_lines.push(format!(" {}", change.value()));
|
||||
}
|
||||
similar::ChangeTag::Delete => {
|
||||
diff_lines.push(format!("- {}", change.value()));
|
||||
}
|
||||
similar::ChangeTag::Insert => {
|
||||
diff_lines.push(format!("+ {}", change.value()));
|
||||
}
|
||||
}
|
||||
(Some(la), Some(lb)) => {
|
||||
diff_lines.push(format!("- {}", la));
|
||||
diff_lines.push(format!("+ {}", lb));
|
||||
}
|
||||
(Some(la), None) => {
|
||||
diff_lines.push(format!("- {}", la));
|
||||
}
|
||||
(None, Some(lb)) => {
|
||||
diff_lines.push(format!("+ {}", lb));
|
||||
}
|
||||
(None, None) => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod common;
|
||||
#[cfg(feature = "swagger")]
|
||||
pub mod item;
|
||||
#[cfg(feature = "mcp")]
|
||||
@@ -59,7 +60,7 @@ use utoipa_swagger_ui::SwaggerUi;
|
||||
struct ApiDoc;
|
||||
|
||||
pub fn add_routes(router: Router<AppState>) -> Router<AppState> {
|
||||
let router = router
|
||||
let mut router = router
|
||||
// Status endpoints
|
||||
.route("/api/status", get(status::handle_status))
|
||||
.route("/api/plugins/status", get(status::handle_plugins_status))
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use axum::{extract::State, http::StatusCode, response::Json};
|
||||
|
||||
use crate::modes::server::common::{AppState, StatusInfoResponse};
|
||||
use crate::modes::server::common::{ApiResponse, AppState, StatusInfoResponse};
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
@@ -76,7 +76,7 @@ pub async fn handle_status(
|
||||
Ok(Json(response))
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Serialize)]
|
||||
#[derive(Debug, serde::Serialize, serde::Deserialize, utoipa::ToSchema)]
|
||||
pub struct PluginsStatusResponse {
|
||||
pub meta_plugins: std::collections::HashMap<String, crate::common::status::MetaPluginInfo>,
|
||||
pub filter_plugins: Vec<crate::common::status::FilterPluginInfo>,
|
||||
@@ -90,7 +90,7 @@ pub struct PluginsStatusResponse {
|
||||
summary = "Get plugins status",
|
||||
description = "Retrieve detailed status of all available plugins including meta, filter, and compression plugins.",
|
||||
responses(
|
||||
(status = 200, description = "Plugins status retrieved", body = ApiResponse),
|
||||
(status = 200, description = "Plugins status retrieved", body = ApiResponse<PluginsStatusResponse>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
|
||||
@@ -567,6 +567,17 @@ fn default_as_meta() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Query parameters for creating an item via POST.
|
||||
///
|
||||
/// Query parameters for POST /api/item/ with streaming binary body.
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct CreateItemQuery {
|
||||
/// Optional comma-separated tags to associate with the item.
|
||||
pub tags: Option<String>,
|
||||
/// Optional metadata as JSON string.
|
||||
pub metadata: Option<String>,
|
||||
}
|
||||
|
||||
/// Request body for creating a new item.
|
||||
///
|
||||
/// Contains the content to store and optional tags.
|
||||
|
||||
@@ -157,7 +157,7 @@ fn build_meta_plugins_configured_table(status_info: &StatusInfo) -> Option<Table
|
||||
if key == &value_str {
|
||||
enabled_output_pairs.push((key.clone(), key.clone()));
|
||||
} else {
|
||||
enabled_output_pairs.push((key.clone(), format!("{}->{}", key, value_str)));
|
||||
enabled_output_pairs.push((key.clone(), format!("{key}->{value_str}")));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user