Files
keep/src/modes/save.rs
Andrew Phillips (aider) 887e9cda42 The SEARCH block must exactly match the existing lines, including context. Here's the corrected block for src/modes/get.rs:
```rust
<<<<<<< SEARCH
use std::str::FromStr;
use clap::Command;
=======
use std::str::FromStr;
use std::io::BufWriter;
use clap::Command;
>>>>>>> REPLACE
```

This ensures the `BufWriter` import is added between `FromStr` and `Command` without duplicating any existing lines.
2025-05-10 11:51:47 -03:00

160 lines
4.4 KiB
Rust

use std::collections::HashMap;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::str::FromStr;

use anyhow::{anyhow, Context, Result};
use chrono::Utc;
use clap::error::ErrorKind;
use clap::Command;
use gethostname::gethostname;
use is_terminal::IsTerminal;
use log::debug;
use rusqlite::Connection;

use crate::compression::{self, CompressionType};
use crate::db::{self};
use crate::modes::common::get_meta_from_env;
pub fn mode_save(
cmd: &mut Command,
args: crate::Args,
ids: &mut Vec<i64>,
tags: &mut Vec<String>,
conn: &mut Connection,
data_path: PathBuf,
) -> Result<()> {
if !ids.is_empty() {
cmd.error(
ErrorKind::InvalidValue,
"ID given, you cannot supply IDs when using --save",
)
.exit();
}
if tags.is_empty() {
tags.push("none".to_string());
}
let compression_name = args
.item
.compression
.unwrap_or(compression::default_type().to_string());
use gethostname::gethostname;
use std::io::Write;
let compression_type_opt = CompressionType::from_str(&compression_name);
if compression_type_opt.is_err() {
cmd.error(
ErrorKind::InvalidValue,
format!("Unknown compression type: {}", compression_name),
)
.exit();
}
let compression_type = compression_type_opt.unwrap();
debug!("MAIN: Compression type: {}", compression_type);
let mut item = db::Item {
id: None,
ts: Utc::now(),
size: None,
compression: compression_type.to_string(),
};
let id = db::insert_item(&conn, item.clone())?;
item.id = Some(id);
debug!("MAIN: Added item {:?}", item.clone());
if !args.options.quiet {
if std::io::stderr().is_terminal() {
let mut t = term::stderr().unwrap();
t.reset().unwrap_or(());
t.attr(term::Attr::Bold).unwrap_or(());
write!(t, "KEEP:").unwrap_or(());
t.reset().unwrap_or(());
write!(t, " New item ").unwrap_or(());
t.attr(term::Attr::Bold).unwrap_or(());
write!(t, "{id}")?;
t.reset().unwrap_or(());
write!(t, " tags: ")?;
t.attr(term::Attr::Bold).unwrap_or(());
write!(t, "{}", tags.join(" "))?;
t.reset().unwrap_or(());
writeln!(t)?;
std::io::stderr().flush()?;
} else {
let mut t = std::io::stderr();
writeln!(t, "KEEP: New item: {} tags: {:?}", id, tags)?;
}
}
db::set_item_tags(&conn, item.clone(), tags)?;
let mut item_meta: HashMap<String, String> = get_meta_from_env();
if let Ok(hostname) = gethostname().into_string() {
if !item_meta.contains_key("hostname") {
item_meta.insert("hostname".to_string(), hostname);
}
}
for item in args.item.meta.iter() {
let item = item.clone();
item_meta.insert(item.key, item.value);
}
for kv in item_meta.iter() {
let meta = db::Meta {
id: item.id.unwrap(),
name: kv.0.to_string(),
value: kv.1.to_string(),
};
db::store_meta(&conn, meta)?;
}
let mut item_path = data_path.clone();
item_path.push(id.to_string());
let mut stdin = io::stdin().lock();
let mut stdout = io::stdout().lock();
let mut buffer = [0; libc::BUFSIZ as usize];
use crate::compression;
let compression_engine = compression::get_engine(compression_type.clone())
.expect("Unable to get compression engine");
let mut item_out: Box<dyn Write> =
compression_engine
.create(item_path.clone())
.context(anyhow!(
"Unable to write file {:?} using compression {:?}",
item_path,
compression_type
))?;
debug!("MAIN: Starting IO loop");
loop {
let n = stdin.read(&mut buffer[..libc::BUFSIZ as usize])?;
if n == 0 {
debug!("MAIN: EOF on STDIN");
break;
}
stdout.write_all(&buffer[..n])?;
item_out.write_all(&buffer[..n])?;
item.size = match item.size {
None => Some(n as i64),
Some(prev_n) => Some(prev_n + n as i64),
};
}
debug!("MAIN: Ending IO loop");
stdout.flush()?;
item_out.flush()?;
db::update_item(&conn, item.clone())?;
Ok(())
}