extern crate apitest;
use failure::*;
+use std::os::unix::io::AsRawFd;
-use std::collections::HashMap;
-use std::fs::File;
-use std::io::Read;
-use std::io::ErrorKind;
-use std::io::prelude::*;
-use std::iter::Iterator;
-
+use apitest::tools;
use apitest::cli::command::*;
use apitest::api::schema::*;
use apitest::api::router::*;
use apitest::backup::chunk_store::*;
-use serde_json::{json, Value};
-use std::path::{Path, PathBuf};
+use apitest::backup::image_index::*;
+use apitest::backup::datastore::*;
+use serde_json::{Value};
use apitest::config::datastore;
}
-// Note: We cannot implement an Iterator, because Iterators cannot
-// return a borrowed buffer ref (we want zero-copy)
-fn file_chunker<C>(
- mut file: File,
- chunk_size: usize,
- chunk_cb: C
-) -> Result<(), Error>
- where C: Fn(usize, &[u8]) -> Result<bool, Error>
-{
-
- const read_buffer_size: usize = 4*1024*1024; // 4M
-
- if chunk_size > read_buffer_size { bail!("chunk size too large!"); }
-
- let mut buf = vec![0u8; read_buffer_size];
-
- let mut pos = 0;
- let mut file_pos = 0;
- loop {
- let mut eof = false;
- let mut tmp = &mut buf[..];
- // try to read large portions, at least chunk_size
- while pos < chunk_size {
- match file.read(tmp) {
- Ok(0) => { eof = true; break; },
- Ok(n) => {
- pos += n;
- if pos > chunk_size { break; }
- tmp = &mut tmp[n..];
- }
- Err(ref e) if e.kind() == ErrorKind::Interrupted => { /* try again */ }
- Err(e) => bail!("read error - {}", e.to_string()),
- }
- }
- println!("READ {} {}", pos, eof);
-
- let mut start = 0;
- while start + chunk_size <= pos {
- if !(chunk_cb)(file_pos, &buf[start..start+chunk_size])? { break; }
- file_pos += chunk_size;
- start += chunk_size;
- }
- if eof {
- if start < pos {
- (chunk_cb)(file_pos, &buf[start..pos])?;
- //file_pos += pos - start;
- }
- break;
- } else {
- let rest = pos - start;
- if rest > 0 {
- let ptr = buf.as_mut_ptr();
- unsafe { std::ptr::copy_nonoverlapping(ptr.add(start), ptr, rest); }
- pos = rest;
- } else {
- pos = 0;
- }
- }
- }
-
- Ok(())
-
-}
-
fn backup_file(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
    let filename = required_string_param(&param, "filename");
    let store = required_string_param(&param, "store");
- let config = datastore::config()?;
- let (_, store_config) = config.sections.get(store)
- .ok_or(format_err!("no such datastore '{}'", store))?;
+ let mut datastore = DataStore::open(store)?;
- let path = store_config["path"].as_str().unwrap();
+ println!("Backup file '{}' to '{}'", filename, store);
- let _store = ChunkStore::open(path)?;
+ let target = "test1.idx";
- println!("Backup file '{}' to '{}'", filename, store);
+ {
+ let file = std::fs::File::open(filename)?;
+ let stat = nix::sys::stat::fstat(file.as_raw_fd())?;
+ if stat.st_size <= 0 { bail!("got strange file size '{}'", stat.st_size); }
+ let size = stat.st_size as usize;
+
+ let mut index = datastore.create_image_writer(target, size)?;
- let file = std::fs::File::open(filename)?;
+ tools::file_chunker(file, 64*1024, |pos, chunk| {
+ index.add_chunk(pos, chunk)?;
+ Ok(true)
+ })?;
- file_chunker(file, 64*1024, |pos, chunk| {
- println!("CHUNK {} {}", pos, chunk.len());
- Ok(true)
- })?;
+ index.close()?; // commit changes
+ }
+
+ let idx = datastore.open_image_reader(target)?;
+ idx.print_info();
Ok(Value::Null)
}
.required("filename", StringSchema::new("Source file name."))
.required("store", StringSchema::new("Datastore name."))
))
- .arg_param(vec!["filename"]);
+ .arg_param(vec!["filename"])
+ .completion_cb("store", apitest::config::datastore::complete_datastore_name);
+
if let Err(err) = run_cli_command(&cmd_def.into()) {
eprintln!("Error: {}", err);