// Co-authored-by: aider (openai/andrew/openrouter/anthropic/claude-sonnet-4) <aider@aider.chat>
use anyhow::{Result, anyhow};
|
|
use serde_json::Value;
|
|
use std::collections::HashMap;
|
|
use std::io::Write;
|
|
use std::str::FromStr;
|
|
use log::{debug, warn};
|
|
|
|
use crate::modes::server::common::AppState;
|
|
use crate::db;
|
|
use crate::compression_engine::{CompressionType, get_compression_engine};
|
|
use crate::meta_plugin::{MetaPluginType, get_meta_plugin};
|
|
|
|
/// Errors produced by the MCP tool handlers ([`KeepTools`]).
///
/// Infrastructure errors (`rusqlite`, I/O, JSON, `anyhow`) convert
/// automatically via `#[from]`, so handler bodies can use `?` throughout.
#[derive(Debug, thiserror::Error)]
pub enum ToolError {
    /// The requested tool name is not recognized.
    #[error("Unknown tool: {0}")]
    UnknownTool(String),
    /// The tool call's arguments were missing, malformed, or referenced
    /// a nonexistent item.
    #[error("Invalid arguments: {0}")]
    InvalidArguments(String),
    /// SQLite error propagated from the `db` layer.
    #[error("Database error: {0}")]
    Database(#[from] rusqlite::Error),
    /// Filesystem/stream error while reading or writing item content.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// JSON (de)serialization failure when building responses.
    #[error("JSON error: {0}")]
    Json(#[from] serde_json::Error),
    /// Catch-all for `anyhow`-typed errors (e.g. compression engine setup).
    #[error("Other error: {0}")]
    Other(#[from] anyhow::Error),
}
|
|
|
|
/// MCP tool implementations backed by the shared server state.
pub struct KeepTools {
    /// Shared application state: async-locked DB connection (`state.db`)
    /// and the item-content directory (`state.data_dir`).
    state: AppState,
}
|
|
|
|
impl KeepTools {
|
|
pub fn new(state: AppState) -> Self {
|
|
Self { state }
|
|
}
|
|
|
|
    /// MCP tool: persist a new item.
    ///
    /// Expected `args` object:
    /// - `"content"` (string, required) — the item body;
    /// - `"tags"` (array of strings, optional) — non-string entries are skipped;
    /// - `"metadata"` (object of string values, optional) — non-string values are skipped.
    ///
    /// Creates the DB row, writes LZ4-compressed content to a file named after
    /// the item ID under `data_dir`, attaches tags/metadata, then runs the
    /// metadata plugins. Returns a human-readable confirmation with the new ID.
    pub async fn save_item(&self, args: Option<Value>) -> Result<String, ToolError> {
        let args = args.ok_or_else(|| ToolError::InvalidArguments("Missing arguments".to_string()))?;

        // 'content' is the only required field.
        let content = args.get("content")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidArguments("Missing 'content' field".to_string()))?;

        let tags: Vec<String> = args.get("tags")
            .and_then(|v| v.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(|s| s.to_string())).collect())
            .unwrap_or_default();

        let metadata: HashMap<String, String> = args.get("metadata")
            .and_then(|v| v.as_object())
            .map(|obj| obj.iter().filter_map(|(k, v)| {
                v.as_str().map(|s| (k.clone(), s.to_string()))
            }).collect())
            .unwrap_or_default();

        debug!("MCP: Saving item with {} bytes, {} tags, {} metadata entries",
            content.len(), tags.len(), metadata.len());

        // NOTE(review): the DB lock is held across the file write and all
        // plugin runs below — presumably to keep item creation atomic with
        // respect to other handlers; confirm this serialization is intended.
        let mut conn = self.state.db.lock().await;

        // Create new item
        let item = db::create_item(&mut *conn, CompressionType::LZ4)?;
        let item_id = item.id.ok_or_else(|| anyhow!("Failed to get item ID"))?;

        // Save content to file (file name is the numeric item ID).
        let mut item_path = self.state.data_dir.clone();
        item_path.push(item_id.to_string());

        let compression_engine = get_compression_engine(CompressionType::LZ4)?;
        let mut writer = compression_engine.create(item_path)?;
        writer.write_all(content.as_bytes())?;
        // NOTE(review): assumes the compression writer flushes/finalizes its
        // stream on drop; if it needs an explicit finish(), data could be
        // truncated — confirm against the engine implementation.
        drop(writer); // Ensure file is closed

        // Add tags
        for tag in &tags {
            db::add_tag(&mut *conn, item_id, tag)?;
        }

        // Add custom metadata
        for (key, value) in &metadata {
            db::add_meta(&mut *conn, item_id, key, value)?;
        }

        // Run metadata plugins
        let meta_plugins = vec![
            MetaPluginType::FileMime,
            MetaPluginType::FileEncoding,
            MetaPluginType::Binary,
            MetaPluginType::LineCount,
            MetaPluginType::WordCount,
            MetaPluginType::DigestSha256,
            MetaPluginType::Uid,
            MetaPluginType::User,
            MetaPluginType::Hostname,
        ];

        // Each plugin goes through initialize → process_file → finalize;
        // a failure at any stage is logged and skips the rest of that plugin
        // (best-effort: plugin failures never fail the save).
        // NOTE(review): `plugin_type` is used after being passed to
        // `get_meta_plugin` — assumes `MetaPluginType: Copy`; confirm.
        for plugin_type in meta_plugins {
            let mut plugin = get_meta_plugin(plugin_type);
            if plugin.is_supported() {
                if let Err(e) = plugin.initialize(&*conn, item_id) {
                    warn!("Failed to initialize plugin {:?}: {}", plugin_type, e);
                    continue;
                }

                // Rebuild the content path (the earlier PathBuf was consumed
                // by `compression_engine.create`).
                let mut item_path = self.state.data_dir.clone();
                item_path.push(item_id.to_string());

                if let Err(e) = plugin.process_file(&*conn, item_id, &item_path) {
                    warn!("Failed to process file with plugin {:?}: {}", plugin_type, e);
                    continue;
                }

                if let Err(e) = plugin.finalize(&*conn) {
                    warn!("Failed to finalize plugin {:?}: {}", plugin_type, e);
                }
            }
        }

        Ok(format!("Successfully saved item with ID: {}", item_id))
    }
|
|
|
|
pub async fn get_item(&self, args: Option<Value>) -> Result<String, ToolError> {
|
|
let args = args.ok_or_else(|| ToolError::InvalidArguments("Missing arguments".to_string()))?;
|
|
|
|
let item_id = args.get("id")
|
|
.and_then(|v| v.as_i64())
|
|
.ok_or_else(|| ToolError::InvalidArguments("Missing or invalid 'id' field".to_string()))?;
|
|
|
|
let mut conn = self.state.db.lock().await;
|
|
|
|
let item = db::get_item(&mut *conn, item_id)?
|
|
.ok_or_else(|| ToolError::InvalidArguments(format!("Item {} not found", item_id)))?;
|
|
|
|
// Get content
|
|
let mut item_path = self.state.data_dir.clone();
|
|
item_path.push(item_id.to_string());
|
|
|
|
let compression_type = crate::compression_engine::CompressionType::from_str(&item.compression)?;
|
|
let compression_engine = get_compression_engine(compression_type)?;
|
|
|
|
let mut reader = compression_engine.open(item_path)?;
|
|
let mut content = String::new();
|
|
std::io::Read::read_to_string(&mut reader, &mut content)?;
|
|
|
|
// Get metadata and tags
|
|
let tags = db::get_item_tags(&mut *conn, &item)?;
|
|
let metadata = db::get_item_meta(&mut *conn, &item)?;
|
|
|
|
let response = serde_json::json!({
|
|
"id": item_id,
|
|
"content": content,
|
|
"timestamp": item.ts.to_rfc3339(),
|
|
"size": item.size,
|
|
"compression": item.compression,
|
|
"tags": tags.iter().map(|t| &t.name).collect::<Vec<_>>(),
|
|
"metadata": metadata.iter().map(|m| (&m.name, &m.value)).collect::<HashMap<_, _>>()
|
|
});
|
|
|
|
Ok(serde_json::to_string_pretty(&response)?)
|
|
}
|
|
|
|
pub async fn get_latest_item(&self, args: Option<Value>) -> Result<String, ToolError> {
|
|
let tags: Vec<String> = args
|
|
.and_then(|v| v.get("tags"))
|
|
.and_then(|v| v.as_array())
|
|
.map(|arr| arr.iter().filter_map(|v| v.as_str().map(|s| s.to_string())).collect())
|
|
.unwrap_or_default();
|
|
|
|
let mut conn = self.state.db.lock().await;
|
|
|
|
let item = if tags.is_empty() {
|
|
db::get_item_last(&mut *conn)?
|
|
} else {
|
|
db::get_item_matching(&mut *conn, &tags, &HashMap::new())?
|
|
};
|
|
|
|
let item = item.ok_or_else(|| ToolError::InvalidArguments("No items found".to_string()))?;
|
|
let item_id = item.id.ok_or_else(|| anyhow!("Item missing ID"))?;
|
|
|
|
// Get content
|
|
let mut item_path = self.state.data_dir.clone();
|
|
item_path.push(item_id.to_string());
|
|
|
|
let compression_type = crate::compression_engine::CompressionType::from_str(&item.compression)?;
|
|
let compression_engine = get_compression_engine(compression_type)?;
|
|
|
|
let mut reader = compression_engine.open(item_path)?;
|
|
let mut content = String::new();
|
|
std::io::Read::read_to_string(&mut reader, &mut content)?;
|
|
|
|
// Get metadata and tags
|
|
let tags = db::get_item_tags(&mut *conn, &item)?;
|
|
let metadata = db::get_item_meta(&mut *conn, &item)?;
|
|
|
|
let response = serde_json::json!({
|
|
"id": item_id,
|
|
"content": content,
|
|
"timestamp": item.ts.to_rfc3339(),
|
|
"size": item.size,
|
|
"compression": item.compression,
|
|
"tags": tags.iter().map(|t| &t.name).collect::<Vec<_>>(),
|
|
"metadata": metadata.iter().map(|m| (&m.name, &m.value)).collect::<HashMap<_, _>>()
|
|
});
|
|
|
|
Ok(serde_json::to_string_pretty(&response)?)
|
|
}
|
|
|
|
pub async fn list_items(&self, args: Option<Value>) -> Result<String, ToolError> {
|
|
let tags: Vec<String> = args
|
|
.as_ref()
|
|
.and_then(|v| v.get("tags"))
|
|
.and_then(|v| v.as_array())
|
|
.map(|arr| arr.iter().filter_map(|v| v.as_str().map(|s| s.to_string())).collect())
|
|
.unwrap_or_default();
|
|
|
|
let limit = args
|
|
.as_ref()
|
|
.and_then(|v| v.get("limit"))
|
|
.and_then(|v| v.as_u64())
|
|
.unwrap_or(10) as usize;
|
|
|
|
let offset = args
|
|
.as_ref()
|
|
.and_then(|v| v.get("offset"))
|
|
.and_then(|v| v.as_u64())
|
|
.unwrap_or(0) as usize;
|
|
|
|
let mut conn = self.state.db.lock().await;
|
|
|
|
let items = if tags.is_empty() {
|
|
db::get_items(&mut *conn)?
|
|
} else {
|
|
db::get_items_matching(&mut *conn, &tags, &HashMap::new())?
|
|
};
|
|
|
|
// Sort by timestamp (newest first) and apply pagination
|
|
let mut items = items;
|
|
items.sort_by(|a, b| b.ts.cmp(&a.ts));
|
|
let items: Vec<_> = items.into_iter().skip(offset).take(limit).collect();
|
|
|
|
// Get item IDs for batch queries
|
|
let item_ids: Vec<i64> = items.iter().filter_map(|item| item.id).collect();
|
|
|
|
// Get tags and metadata for all items
|
|
let tags_map = db::get_tags_for_items(&mut *conn, &item_ids)?;
|
|
let meta_map = db::get_meta_for_items(&mut *conn, &item_ids)?;
|
|
|
|
let items_info: Vec<_> = items
|
|
.into_iter()
|
|
.map(|item| {
|
|
let item_id = item.id.unwrap_or(0);
|
|
let item_tags = tags_map.get(&item_id)
|
|
.map(|tags| tags.iter().map(|t| &t.name).collect::<Vec<_>>())
|
|
.unwrap_or_default();
|
|
let item_meta = meta_map.get(&item_id)
|
|
.map(|meta| meta.iter().map(|m| (&m.name, &m.value)).collect::<HashMap<_, _>>())
|
|
.unwrap_or_default();
|
|
|
|
serde_json::json!({
|
|
"id": item_id,
|
|
"timestamp": item.ts.to_rfc3339(),
|
|
"size": item.size,
|
|
"compression": item.compression,
|
|
"tags": item_tags,
|
|
"metadata": item_meta
|
|
})
|
|
})
|
|
.collect();
|
|
|
|
let response = serde_json::json!({
|
|
"items": items_info,
|
|
"count": items_info.len(),
|
|
"offset": offset,
|
|
"limit": limit
|
|
});
|
|
|
|
Ok(serde_json::to_string_pretty(&response)?)
|
|
}
|
|
|
|
pub async fn search_items(&self, args: Option<Value>) -> Result<String, ToolError> {
|
|
let tags: Vec<String> = args
|
|
.as_ref()
|
|
.and_then(|v| v.get("tags"))
|
|
.and_then(|v| v.as_array())
|
|
.map(|arr| arr.iter().filter_map(|v| v.as_str().map(|s| s.to_string())).collect())
|
|
.unwrap_or_default();
|
|
|
|
let metadata: HashMap<String, String> = args
|
|
.as_ref()
|
|
.and_then(|v| v.get("metadata"))
|
|
.and_then(|v| v.as_object())
|
|
.map(|obj| obj.iter().filter_map(|(k, v)| {
|
|
v.as_str().map(|s| (k.clone(), s.to_string()))
|
|
}).collect())
|
|
.unwrap_or_default();
|
|
|
|
let mut conn = self.state.db.lock().await;
|
|
|
|
let items = db::get_items_matching(&mut *conn, &tags, &metadata)?;
|
|
|
|
// Sort by timestamp (newest first)
|
|
let mut items = items;
|
|
items.sort_by(|a, b| b.ts.cmp(&a.ts));
|
|
|
|
// Get item IDs for batch queries
|
|
let item_ids: Vec<i64> = items.iter().filter_map(|item| item.id).collect();
|
|
|
|
// Get tags and metadata for all items
|
|
let tags_map = db::get_tags_for_items(&mut *conn, &item_ids)?;
|
|
let meta_map = db::get_meta_for_items(&mut *conn, &item_ids)?;
|
|
|
|
let items_info: Vec<_> = items
|
|
.into_iter()
|
|
.map(|item| {
|
|
let item_id = item.id.unwrap_or(0);
|
|
let item_tags = tags_map.get(&item_id)
|
|
.map(|tags| tags.iter().map(|t| &t.name).collect::<Vec<_>>())
|
|
.unwrap_or_default();
|
|
let item_meta = meta_map.get(&item_id)
|
|
.map(|meta| meta.iter().map(|m| (&m.name, &m.value)).collect::<HashMap<_, _>>())
|
|
.unwrap_or_default();
|
|
|
|
serde_json::json!({
|
|
"id": item_id,
|
|
"timestamp": item.ts.to_rfc3339(),
|
|
"size": item.size,
|
|
"compression": item.compression,
|
|
"tags": item_tags,
|
|
"metadata": item_meta
|
|
})
|
|
})
|
|
.collect();
|
|
|
|
let response = serde_json::json!({
|
|
"items": items_info,
|
|
"count": items_info.len(),
|
|
"search_criteria": {
|
|
"tags": tags,
|
|
"metadata": metadata
|
|
}
|
|
});
|
|
|
|
Ok(serde_json::to_string_pretty(&response)?)
|
|
}
|
|
}
|