refactor: deduplicate ItemInfo, improve error handling, fix pre-existing bugs
- Move ItemInfo to services/types.rs for sharing between client and server
- Replace .expect() in compression_service with proper error handling
- Add CoreError::PayloadTooLarge variant for semantic error handling
- Export CoreError from lib.rs for library users
- Unify get_item_meta_name/value to take &str instead of String
- Extract item_path() helper in ItemService to reduce duplication
- Add warning logs for silent errors in list.rs
- Fix pre-existing borrow errors: tx moved in export handler, item_with_meta partial move in TryFrom implementation
- Fix unused data_dir variables in server code
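
Sketch of the consumer-facing import change described above (illustrative only; the paths are the ones used in the diff below):

    // Before: ItemInfo lived in the HTTP client module.
    // use crate::client::{ItemInfo, KeepClient};

    // After: ItemInfo is shared from services, and CoreError is re-exported at the crate root.
    use crate::client::KeepClient;
    use crate::services::ItemInfo;
    // external users: `use <crate_name>::CoreError;`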
@@ -1,22 +1,9 @@
-use crate::services::error::CoreError;
+use crate::services::{ItemInfo, error::CoreError};
 use base64::Engine;
 use serde::de::DeserializeOwned;
 use std::collections::HashMap;
 use std::io::Read;
 
-/// Item information returned from the server API.
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub struct ItemInfo {
-    pub id: i64,
-    pub ts: String,
-    pub uncompressed_size: Option<i64>,
-    pub compressed_size: Option<i64>,
-    pub closed: bool,
-    pub compression: String,
-    pub tags: Vec<String>,
-    pub metadata: HashMap<String, String>,
-}
-
 /// Percent-encode a value for use in a URL query string.
 fn url_encode(s: &str) -> String {
     let mut result = String::with_capacity(s.len() * 3);
@@ -1355,11 +1355,11 @@ pub fn get_item_meta(conn: &Connection, item: &Item) -> Result<Vec<Meta>> {
 /// let db_path = _tmp.path().join("keep.db");
 /// let conn = db::open(db_path)?;
 /// let item = Item { id: Some(1), ts: Utc::now(), uncompressed_size: None, compressed_size: None, closed: false, compression: "lz4".to_string() };
-/// let meta = db::get_item_meta_name(&conn, &item, "mime_type".to_string())?;
+/// let meta = db::get_item_meta_name(&conn, &item, "mime_type")?;
 /// # Ok(())
 /// # }
 /// ```
-pub fn get_item_meta_name(conn: &Connection, item: &Item, name: String) -> Result<Option<Meta>> {
+pub fn get_item_meta_name(conn: &Connection, item: &Item, name: &str) -> Result<Option<Meta>> {
     debug!("DB: Getting item meta name: {item:?} {name:?}");
     let mut statement = conn
         .prepare_cached("SELECT id, name, value FROM metas WHERE id=?1 AND name=?2")
@@ -1407,11 +1407,11 @@ pub fn get_item_meta_name(conn: &Connection, item: &Item, name: String) -> Result<Option<Meta>> {
 /// let db_path = _tmp.path().join("keep.db");
 /// let conn = db::open(db_path)?;
 /// let item = Item { id: Some(1), ts: Utc::now(), uncompressed_size: None, compressed_size: None, closed: false, compression: "lz4".to_string() };
-/// let value = db::get_item_meta_value(&conn, &item, "source".to_string())?;
+/// let value = db::get_item_meta_value(&conn, &item, "source")?;
 /// # Ok(())
 /// # }
 /// ```
-pub fn get_item_meta_value(conn: &Connection, item: &Item, name: String) -> Result<Option<String>> {
+pub fn get_item_meta_value(conn: &Connection, item: &Item, name: &str) -> Result<Option<String>> {
     debug!("DB: Getting item meta value: {item:?} {name:?}");
     let mut statement = conn
         .prepare_cached("SELECT value FROM metas WHERE id=?1 AND name=?2")
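
Call-site sketch for the &str signatures above (assumed usage, not part of this diff); string literals and borrowed Strings both work without an extra allocation:

    // Literals can be passed directly.
    let meta = db::get_item_meta_name(&conn, &item, "mime_type")?;
    let value = db::get_item_meta_value(&conn, &item, "source")?;

    // An owned String still works via a plain borrow (deref coercion to &str).
    let name = String::from("mime_type");
    let same = db::get_item_meta_name(&conn, &item, &name)?;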
@@ -52,6 +52,7 @@ pub mod tokenizer;
 pub use args::Args;
 // Re-export PIPESIZE constant
 pub use common::PIPESIZE;
+pub use services::CoreError;
 
 // Import all filter plugins to ensure they register themselves
 #[allow(unused_imports)]
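
With CoreError re-exported at the crate root, downstream users can match on typed variants instead of inspecting error strings. A minimal sketch, assuming the crate is named `keep` (the actual crate name is not shown in this diff):

    use keep::CoreError;

    fn status_hint(err: &CoreError) -> &'static str {
        match err {
            CoreError::ItemNotFound(_) | CoreError::ItemNotFoundGeneric => "not found",
            CoreError::PayloadTooLarge => "payload too large",
            CoreError::InvalidInput(_) => "bad request",
            CoreError::Compression(_) => "compression failure",
            _ => "internal error",
        }
    }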
@@ -39,7 +39,7 @@ pub fn mode(
 
     // Decompress through streaming readers
     let mut decompressed_reader: Box<dyn Read> =
-        CompressionService::decompressing_reader(reader, &compression_type);
+        CompressionService::decompressing_reader(reader, &compression_type)?;
 
     // Binary detection: sample first chunk
     let mut sample_buf = [0u8; crate::common::PIPESIZE];
@@ -29,7 +29,7 @@ pub fn mode(
     } else {
         cmd.error(
             clap::error::ErrorKind::InvalidValue,
-            format!("Unsupported import format: {import_path} (expected .keep.tar or .meta.yml)"),
+            format!("Unsupported import format: {}", import_path),
         )
         .exit();
     }
@@ -1,8 +1,9 @@
-use crate::client::{ItemInfo, KeepClient};
+use crate::client::KeepClient;
 use crate::compression_engine::CompressionType;
 use crate::config::Settings;
 use crate::meta_plugin::SaveMetaFn;
 use crate::modes::common::settings_compression_type;
+use crate::services::ItemInfo;
 use crate::services::compression_service::CompressionService;
 use crate::services::meta_service::MetaService;
 use anyhow::Result;
@@ -75,7 +76,7 @@ pub fn mode(
 
     // Wrap pipe writer with appropriate compression
     let mut compressor: Box<dyn Write> =
-        CompressionService::compressing_writer(Box::new(pipe_writer), &compression_type_clone);
+        CompressionService::compressing_writer(Box::new(pipe_writer), &compression_type_clone)?;
 
     loop {
         let n = stdin_lock.read(&mut buffer)?;
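
Both compression helpers now return Result, so the mode functions simply propagate with `?`. A minimal sketch of that pattern in a hypothetical helper, assuming the surrounding function returns anyhow::Result as the `use anyhow::Result;` import in this file suggests:

    use std::io::Write;

    fn write_compressed(dest: Box<dyn Write>, ct: &CompressionType, data: &[u8]) -> anyhow::Result<()> {
        // CoreError implements std::error::Error via thiserror, so `?` converts it into anyhow::Error.
        let mut compressor = CompressionService::compressing_writer(dest, ct)?;
        compressor.write_all(data)?;
        Ok(())
    }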
@@ -61,6 +61,9 @@ lazy_static! {
     static ref KEEP_META_RE: Regex = Regex::new(r"^KEEP_META_(.+)$").unwrap();
 }
 
+pub const IMPORT_FORMAT_ERROR: &str =
+    "Unsupported import format: {} (expected .keep.tar or .meta.yml)";
+
 pub fn get_meta_from_env() -> HashMap<String, String> {
     debug!("COMMON: Getting meta from KEEP_META_*");
     let mut meta_env: HashMap<String, String> = HashMap::new();
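
The new constant keeps a literal {} placeholder, so a call site would substitute the path itself rather than passing it to format! (which needs a literal format string). A hypothetical use, not shown in this diff:

    let msg = IMPORT_FORMAT_ERROR.replace("{}", &import_path);
    cmd.error(clap::error::ErrorKind::InvalidValue, msg).exit();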
@@ -52,7 +52,7 @@ pub fn mode_import(
     } else {
         cmd.error(
             clap::error::ErrorKind::InvalidValue,
-            format!("Unsupported import format: {import_path} (expected .keep.tar or .meta.yml)"),
+            format!("Unsupported import format: {}", import_path),
         )
         .exit();
     }
@@ -151,13 +151,19 @@ pub fn mode_list(
             Some(size) => format_size(size as u64, settings.human_readable),
             None => match item_path.metadata() {
                 Ok(_) => "Unknown".to_string(),
-                Err(_) => "Missing".to_string(),
+                Err(e) => {
+                    log::warn!("File missing or inaccessible: {}", e);
+                    "Missing".to_string()
+                }
             },
         },
         ColumnType::Compression => item.compression.to_string(),
         ColumnType::FileSize => match item_path.metadata() {
             Ok(metadata) => format_size(metadata.len(), settings.human_readable),
-            Err(_) => "Missing".to_string(),
+            Err(e) => {
+                log::warn!("File missing or inaccessible: {}", e);
+                "Missing".to_string()
+            }
         },
         ColumnType::FilePath => item_path
             .clone()
@@ -227,7 +227,7 @@ async fn handle_as_meta_response(
 /// offset/length applied at the stream level — never loads the full item into memory.
 async fn handle_as_meta_response_with_metadata(
     db: &Arc<tokio::sync::Mutex<rusqlite::Connection>>,
-    data_dir: &std::path::Path,
+    _data_dir: &std::path::Path,
     item_service: &Arc<ItemService>,
     item_id: i64,
     metadata: &HashMap<String, String>,
@@ -1451,7 +1451,7 @@ pub async fn handle_export_items(
 
     let db = state.db.clone();
     let item_service = state.item_service.clone();
-    let data_dir = state.data_dir.clone();
+    let _data_dir = state.data_dir.clone();
 
     // Resolve items in blocking task
     let items_with_meta = task::spawn_blocking(move || {
@@ -1520,6 +1520,7 @@ pub async fn handle_export_items(
             }
         }
 
+        let tx_err = tx.clone();
         let writer = ChannelWriter { tx };
 
         if let Err(e) = crate::export_tar::write_export_tar(
@@ -1532,7 +1533,7 @@ pub async fn handle_export_items(
             &conn,
         ) {
             warn!("Export tar write failed: {e}");
-            let _ = tx.blocking_send(Err(std::io::Error::other(format!("Export failed: {e}"))));
+            let _ = tx_err.blocking_send(Err(std::io::Error::other(format!("Export failed: {e}"))));
         }
         // Channel drops here, signaling EOF to the stream
     });
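
Background on the tx_err clone above: `tx` is moved into `ChannelWriter { tx }`, so the later error branch could no longer use it (E0382, use of a moved value). Cloning the channel sender first (tokio's blocking_send suggests an mpsc::Sender, which is cheaply cloneable) fixes that, since clones feed the same channel. Sketch of the before/after:

    // Before: rejected by the borrow checker.
    // let writer = ChannelWriter { tx };      // `tx` moved here
    // ...
    // let _ = tx.blocking_send(Err(..));      // error: use of moved value `tx`

    // After: clone the sender up front for the error path.
    let tx_err = tx.clone();
    let writer = ChannelWriter { tx };
    // on failure: let _ = tx_err.blocking_send(Err(std::io::Error::other("export failed")));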
@@ -1602,11 +1603,7 @@ pub async fn handle_import_items(
         .map_err(|e| {
             warn!("Failed to import tar: {e}");
             match &e {
-                crate::services::error::CoreError::Io(io_err)
-                    if io_err.to_string() == "Payload too large" =>
-                {
-                    StatusCode::PAYLOAD_TOO_LARGE
-                }
+                crate::services::error::CoreError::PayloadTooLarge => StatusCode::PAYLOAD_TOO_LARGE,
                 _ => StatusCode::INTERNAL_SERVER_ERROR,
             }
         })?;
@@ -419,6 +419,8 @@ impl TryFrom<ItemWithMeta> for ItemInfo {
     type Error = anyhow::Error;
 
     fn try_from(item_with_meta: ItemWithMeta) -> Result<Self, Self::Error> {
+        let tags = item_with_meta.tag_names();
+        let metadata = item_with_meta.meta_as_map();
         Ok(ItemInfo {
             id: item_with_meta
                 .item
@@ -429,8 +431,8 @@ impl TryFrom<ItemWithMeta> for ItemInfo {
             compressed_size: item_with_meta.item.compressed_size,
             closed: item_with_meta.item.closed,
             compression: item_with_meta.item.compression,
-            tags: item_with_meta.tag_names(),
-            metadata: item_with_meta.meta_as_map(),
+            tags,
+            metadata,
         })
     }
 }
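
Background on the reordering above: struct literal fields are evaluated in the order written, so `compression: item_with_meta.item.compression` moves that field out of `item_with_meta` (a partial move), and the following `tags: item_with_meta.tag_names()` then needs `&self` on a partially moved value, which the borrow checker rejects. Reading tags and metadata before any field is moved resolves it:

    // Everything that borrows `item_with_meta` happens first...
    let tags = item_with_meta.tag_names();
    let metadata = item_with_meta.meta_as_map();
    Ok(ItemInfo {
        // ...other fields...
        compression: item_with_meta.item.compression, // ...and the move happens last
        tags,
        metadata,
    })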
@@ -112,38 +112,27 @@ impl CompressionService {
         Ok(reader)
     }
 
-    /// Creates a decompressing reader wrapping the given reader.
-    ///
-    /// Returns a boxed reader that decompresses on the fly based on the compression type.
-    /// Useful for decompressing network streams or other non-file sources.
-    ///
-    /// # Arguments
-    ///
-    /// * `reader` - The underlying compressed reader.
-    /// * `compression` - Compression type string (e.g., "gzip", "lz4").
-    ///
-    /// # Returns
-    ///
-    /// A boxed decompressing reader. Unknown/none types pass through unchanged.
     pub fn decompressing_reader(
         reader: Box<dyn Read>,
         compression: &CompressionType,
-    ) -> Box<dyn Read> {
+    ) -> Result<Box<dyn Read>, CoreError> {
         match compression {
             CompressionType::GZip => {
                 use flate2::read::GzDecoder;
-                Box::new(GzDecoder::new(reader))
+                Ok(Box::new(GzDecoder::new(reader)))
             }
             CompressionType::LZ4 => {
                 use lz4_flex::frame::FrameDecoder;
-                Box::new(FrameDecoder::new(reader))
+                Ok(Box::new(FrameDecoder::new(reader)))
             }
             #[cfg(feature = "zstd")]
             CompressionType::ZStd => {
                 use zstd::stream::read::Decoder;
-                Box::new(Decoder::new(reader).expect("Failed to create zstd decoder"))
+                Ok(Box::new(Decoder::new(reader).map_err(|e| {
+                    CoreError::Compression(format!("zstd decoder error: {}", e))
+                })?))
             }
-            _ => reader,
+            _ => Ok(reader),
         }
     }
 
@@ -163,24 +152,24 @@ impl CompressionService {
     pub fn compressing_writer(
         writer: Box<dyn Write>,
         compression: &CompressionType,
-    ) -> Box<dyn Write> {
+    ) -> Result<Box<dyn Write>, CoreError> {
         match compression {
             CompressionType::GZip => {
                 use flate2::Compression;
                 use flate2::write::GzEncoder;
-                Box::new(GzEncoder::new(writer, Compression::default()))
+                Ok(Box::new(GzEncoder::new(writer, Compression::default())))
             }
-            CompressionType::LZ4 => Box::new(lz4_flex::frame::FrameEncoder::new(writer)),
+            CompressionType::LZ4 => Ok(Box::new(lz4_flex::frame::FrameEncoder::new(writer))),
             #[cfg(feature = "zstd")]
             CompressionType::ZStd => {
                 use zstd::stream::write::Encoder;
-                Box::new(
+                Ok(Box::new(
                     Encoder::new(writer, 3)
-                        .expect("Failed to create zstd encoder")
+                        .map_err(|e| CoreError::Compression(format!("zstd encoder error: {}", e)))?
                         .auto_finish(),
-                )
+                ))
             }
-            _ => writer,
+            _ => Ok(writer),
         }
     }
 }
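
A self-contained round-trip sketch through the new fallible constructors (hypothetical helper functions; file-based so the boxed writer can own its destination; whether each codec fully finalizes on drop depends on the encoder, and the crate's own call sites also rely on drop):

    use std::fs::File;
    use std::io::{Read, Write};

    fn compress_to_file(data: &[u8], path: &str, ct: &CompressionType) -> Result<(), CoreError> {
        let dest: Box<dyn Write> = Box::new(File::create(path)?); // io::Error converts via #[from]
        let mut writer = CompressionService::compressing_writer(dest, ct)?;
        writer.write_all(data)?;
        Ok(()) // writer dropped here
    }

    fn decompress_from_file(path: &str, ct: &CompressionType) -> Result<Vec<u8>, CoreError> {
        let src: Box<dyn Read> = Box::new(File::open(path)?);
        let mut reader = CompressionService::decompressing_reader(src, ct)?;
        let mut out = Vec::new();
        reader.read_to_end(&mut out)?;
        Ok(out)
    }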
@@ -13,32 +13,27 @@ use thiserror::Error;
 /// * `ItemNotFoundGeneric` - Generic item not found (no ID specified).
 /// * `InvalidInput(String)` - User or config input validation failure with message.
 /// * `Compression(String)` - Compression/decompression errors with details.
+/// * `PayloadTooLarge` - Request body exceeded maximum allowed size.
 /// * `Other(anyhow::Error)` - Catch-all for other anyhow-wrapped errors.
 /// * `Migration(rusqlite_migration::Error)` - Database migration failures.
 #[derive(Error, Debug)]
 pub enum CoreError {
     #[error("Database error: {0}")]
-    /// Database operation failed.
     Database(#[from] rusqlite::Error),
     #[error("I/O error: {0}")]
-    /// File or stream I/O operation failed.
     Io(#[from] std::io::Error),
     #[error("Item not found with id {0}")]
-    /// Item with the specified ID does not exist in the database.
     ItemNotFound(i64),
     #[error("Item not found")]
-    /// Item does not exist (no specific ID).
    ItemNotFoundGeneric,
     #[error("Invalid input: {0}")]
-    /// Input validation failed.
     InvalidInput(String),
     #[error("Compression error: {0}")]
-    /// Compression or decompression operation failed.
     Compression(String),
+    #[error("Payload too large")]
+    PayloadTooLarge,
     #[error(transparent)]
-    /// Other unexpected error.
     Other(#[from] anyhow::Error),
     #[error("Migration error: {0}")]
-    /// Database schema migration failed.
     Migration(#[from] rusqlite_migration::Error),
 }
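
The new variant gives the import path a typed way to signal an oversized body; where the size check lives is not part of this diff, but a producer would look roughly like this (the limit name is illustrative):

    if received_bytes > max_import_bytes {
        return Err(CoreError::PayloadTooLarge);
    }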
@@ -62,6 +62,12 @@ impl ItemService {
         }
     }
 
+    fn item_path(&self, item_id: i64) -> PathBuf {
+        let mut path = self.data_path.clone();
+        path.push(item_id.to_string());
+        path
+    }
+
     /// Retrieves an item with its associated metadata and tags.
     ///
     /// Fetches the item from the database by ID and loads its tags and metadata.
@@ -159,8 +165,7 @@ impl ItemService {
             )));
         }
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
         debug!("ITEM_SERVICE: Reading content from path: {item_path:?}");
 
         let content = self
@@ -304,8 +309,7 @@ impl ItemService {
             )));
         }
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
 
         let reader = self
             .compression_service
@@ -345,8 +349,7 @@ impl ItemService {
             )));
         }
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
 
         let reader = self
             .compression_service
@@ -540,8 +543,7 @@ impl ItemService {
         let item = db::get_item(conn, id)?.ok_or(CoreError::ItemNotFound(id))?;
         debug!("ITEM_SERVICE: Found item to delete: {item:?}");
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(id.to_string());
+        let item_path = self.item_path(id);
         debug!("ITEM_SERVICE: Deleting file at path: {item_path:?}");
 
         let deleted_item = item.clone();
@@ -662,8 +664,7 @@ impl ItemService {
         debug!("ITEM_SERVICE: Got {} meta plugins", plugins.len());
         meta_service.initialize_plugins(&mut plugins);
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
         debug!("ITEM_SERVICE: Writing item to path: {item_path:?}");
 
         let mut item_out = compression_engine.create(item_path.clone())?;
@@ -859,8 +860,7 @@ impl ItemService {
             meta_service.initialize_plugins(&mut plugins);
         }
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
 
         let mut item_out = compression_engine.create(item_path.clone())?;
 
@@ -933,8 +933,7 @@ impl ItemService {
             return self.get_item(conn, item_id);
         }
 
-        let mut item_path = self.data_path.clone();
-        item_path.push(item_id.to_string());
+        let item_path = self.item_path(item_id);
 
         if !item_path.exists() {
             return Err(CoreError::ItemNotFound(item_id));
@@ -13,5 +13,5 @@ pub use filter_service::{FilterService, register_filter_plugin};
 pub use item_service::ItemService;
 pub use meta_service::MetaService;
 pub use status_service::StatusService;
-pub use types::{ItemWithContent, ItemWithMeta};
+pub use types::{ItemInfo, ItemWithContent, ItemWithMeta};
 pub use utils::{calc_byte_range, extract_tags, parse_comma_tags};
@@ -62,3 +62,15 @@ pub struct ItemWithContent {
     /// The content bytes.
     pub content: Vec<u8>,
 }
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ItemInfo {
+    pub id: i64,
+    pub ts: String,
+    pub uncompressed_size: Option<i64>,
+    pub compressed_size: Option<i64>,
+    pub closed: bool,
+    pub compression: String,
+    pub tags: Vec<String>,
+    pub metadata: HashMap<String, String>,
+}
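
ItemInfo now derives Serialize/Deserialize in one shared place, so the server can emit it and the client can parse the same type. A minimal JSON round-trip sketch, assuming serde_json is available to the crate (field values are made up):

    use crate::services::ItemInfo;
    use std::collections::HashMap;

    fn item_info_roundtrip() -> serde_json::Result<()> {
        let info = ItemInfo {
            id: 1,
            ts: "2024-01-01T00:00:00Z".to_string(),
            uncompressed_size: Some(1024),
            compressed_size: Some(256),
            closed: true,
            compression: "lz4".to_string(),
            tags: vec!["example".to_string()],
            metadata: HashMap::new(),
        };
        let json = serde_json::to_string(&info)?;
        let parsed: ItemInfo = serde_json::from_str(&json)?;
        assert_eq!(parsed.id, info.id);
        Ok(())
    }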