Compare commits: af1a60665a ... 894fcbcd02

2 Commits

Author | SHA1 | Date
---|---|---
 | 894fcbcd02 |
 | 716b559062 |
src/morethantext/cache.rs (new file, 168 lines)

@@ -0,0 +1,168 @@
use super::{DBError, SessionData, Store};
use async_std::{fs::write, path::Path};
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use std::{
    cell::Cell,
    time::{Duration, Instant},
};

#[derive(Clone)]
enum DataType {
    DBMap(Store),
}

impl DataType {
    fn new(data_type: &str) -> Result<Self, DBError> {
        match data_type {
            "store" => Ok(DataType::DBMap(Store::new())),
            _ => Err(DBError::new("invalid data type")),
        }
    }
}

impl SessionData for DataType {
    fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
        match self {
            DataType::DBMap(dbs) => dbs.add(key, value, data),
        }
    }

    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
        match self {
            DataType::DBMap(dbs) => dbs.eq(key, value),
        }
    }

    fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
        match self {
            DataType::DBMap(dbs) => dbs.list(keys),
        }
    }
}

struct Entry {
    data: DataType,
    filename: String,
    last_used: Cell<Instant>,
}

impl Entry {
    async fn new(filename: String, data: DataType) -> Result<Self, DBError> {
        if Path::new(&filename).exists().await {
            return Err(DBError::new("entry already exists"));
        }
        Ok(Self {
            data: data,
            filename: filename,
            last_used: Cell::new(Instant::now()),
        })
    }

    async fn get(&self) -> Result<DataType, DBError> {
        Ok(self.data.clone())
    }
}

struct Cache;

impl Cache {
    async fn new(dir: &str) -> Self {
        Self
    }
}

#[cfg(test)]
mod datatype_sesssion {
    use super::*;

    #[test]
    fn invalid_cache_type() -> Result<(), DBError> {
        match DataType::new("dkhgdl") {
            Ok(_) => Err(DBError::new("invalid data type should raise an error")),
            Err(err) => {
                assert_eq!(err.to_string(), "invalid data type");
                Ok(())
            }
        }
    }

    #[test]
    fn create_storage() {
        let dbs = DataType::new("store").unwrap();
        let expected: Vec<String> = Vec::new();
        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), expected);
    }

    #[test]
    fn update_storage() {
        let mut dbs = DataType::new("store").unwrap();
        let name = "new_database";
        let id = "someid";
        dbs.add("database", name, id).unwrap();
        assert_eq!(dbs.eq("database", name).unwrap(), [id].to_vec());
        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), [name].to_vec());
    }
}

#[cfg(test)]
mod datatype_file {
    use super::*;

    // Test file data traits here.
}

#[cfg(test)]
mod entry {
    use super::*;
    use tempfile::tempdir;

    #[async_std::test]
    async fn create() {
        let dir = tempdir().unwrap();
        let mut data = DataType::new("store").unwrap();
        data.add("database", "roger", "moore").unwrap();
        let filepath = dir.path().join("wiliam");
        let filename = filepath.to_str().unwrap();
        let item = Entry::new(filename.to_string(), data.clone())
            .await
            .unwrap();
        let output = item.get().await.unwrap();
        assert_eq!(
            data.list(["database"].to_vec()).unwrap(),
            output.list(["database"].to_vec()).unwrap()
        );
    }

    #[async_std::test]
    async fn no_over_writes() -> Result<(), DBError> {
        let dir = tempdir().unwrap();
        let id = "wicked";
        let file = dir.path().join(id);
        let filename = file.to_str().unwrap();
        write(&file, b"previous").await.unwrap();
        let data = DataType::new("store").unwrap();
        match Entry::new(filename.to_string(), data).await {
            Ok(_) => {
                return Err(DBError::new(
                    "Should produce an error for an existing Entry",
                ))
            }
            Err(err) => {
                assert_eq!(err.to_string(), "entry already exists");
                Ok(())
            }
        }
    }
}

#[cfg(test)]
mod cache {
    use super::*;
    use tempfile::tempdir;

    #[async_std::test]
    async fn create() {
        let dir = tempdir().unwrap();
        Cache::new(dir.path().to_str().unwrap()).await;
    }
}
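A standalone sketch of the existence guard used by Entry::new above (illustrative only, not part of either commit; it assumes nothing beyond the async-std crate):

use async_std::path::Path;

// async-std's Path::exists is an async fn and must be awaited,
// unlike std::path::Path::exists.
async fn entry_is_free(filename: &str) -> bool {
    !Path::new(filename).exists().await
}

fn main() {
    async_std::task::block_on(async {
        println!("free: {}", entry_is_free("/tmp/morethantext-demo-entry").await);
    });
}

The tests in this file also rely on #[async_std::test], which needs async-std's "attributes" feature, and on tempfile::tempdir, presumably as a dev-dependency.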
@@ -1,5 +1,6 @@
-mod databases;
+mod cache;
 pub mod error;
+mod store;
 
 use async_std::{
     fs::{create_dir, read, remove_file, write},
@@ -7,7 +8,6 @@ use async_std::{
     sync::{Arc, Mutex},
     task::{sleep, spawn},
 };
-use databases::Databases;
 use error::DBError;
 use rand::{distributions::Alphanumeric, thread_rng, Rng};
 use std::{
@@ -15,6 +15,7 @@ use std::{
     fmt, slice, str,
     time::{Duration, Instant},
 };
+use store::Store;
 
 const DATA: &str = "data";
 const ENTRY: &str = "databases";
@@ -26,14 +27,14 @@ trait FileData<F> {
 
 trait SessionData {
     fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError>;
-    fn get(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
+    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
     fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError>;
 }
 
 #[derive(Clone)]
 pub enum CacheType {
     Raw(String),
-    DBMap(Databases),
+    DBMap(Store),
     TableMap,
 }
 
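For reference, a self-contained toy implementation of the renamed SessionData trait (illustrative only: ToyStore and the plain String error stand in for the crate's Store and DBError, and the semantics are inferred from the session_data tests later in this diff):

use std::collections::HashMap;

trait SessionData {
    fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, String>;
    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, String>;
    fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, String>;
}

#[derive(Default)]
struct ToyStore {
    db_map: HashMap<String, String>, // database name -> id
}

impl SessionData for ToyStore {
    fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, String> {
        if key != "database" {
            return Err(format!("{} is not a valid key", key));
        }
        self.db_map.insert(value.to_string(), data.to_string());
        Ok(vec![data.to_string()])
    }

    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, String> {
        // eq looks up the ids stored under the given name.
        if key != "database" {
            return Err(format!("{} is not a valid key", key));
        }
        Ok(self.db_map.get(value).cloned().into_iter().collect())
    }

    fn list(&self, _keys: Vec<&str>) -> Result<Vec<String>, String> {
        // list returns the stored names in sorted order.
        let mut names: Vec<String> = self.db_map.keys().cloned().collect();
        names.sort();
        Ok(names)
    }
}

fn main() {
    let mut store = ToyStore::default();
    store.add("database", "fred", "id-1").unwrap();
    assert_eq!(store.eq("database", "fred").unwrap(), ["id-1"]);
    assert_eq!(store.list(vec!["database"]).unwrap(), ["fred"]);
}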
@@ -81,7 +82,7 @@ impl CacheType {
                 }
                 Ok(CacheType::Raw(str::from_utf8(&output).unwrap().to_string()))
             }
-            "DBMap" => Ok(CacheType::DBMap(Databases::new())),
+            "DBMap" => Ok(CacheType::DBMap(Store::new())),
             "TableMap" => Ok(CacheType::TableMap),
             _ => Err(DBError::new("data corruption")),
         }
@@ -164,7 +165,7 @@ impl MoreThanText {
             id = str::from_utf8(&holder).unwrap().to_string();
         } else {
             id = output
-                .add_entry(CacheType::DBMap(Databases::new()))
+                .add_entry(CacheType::DBMap(Store::new()))
                 .await
                 .unwrap();
             write(entry_file, id.as_bytes()).await.unwrap();
@@ -195,6 +196,26 @@ impl MoreThanText {
         filename.into_os_string().into_string().unwrap()
     }
 
+    fn new_id(&self) -> String {
+        thread_rng().sample_iter(&Alphanumeric).take(64).collect()
+    }
+
+    async fn add(&self, feature: &str, key: &str, value: &str) -> Self {
+        let mut ids: Vec<String> = Vec::new();
+        for id in self.session.clone().into_iter() {
+            let holder = self.get_entry(&id).await.unwrap();
+            //holder.add(feature, key, value);
+        }
+        let mut output = self.clone();
+        output.session.clear();
+        output.session.push(value.to_string());
+        output
+    }
+
+    async fn list(&self, feature: Vec<&str>) -> Result<Vec<String>, DBError> {
+        Ok(Vec::new())
+    }
+
     async fn add_entry(&self, entry: CacheType) -> Result<String, DBError> {
         let mut id: String = "".to_string();
         let mut dup = true;
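The new_id helper added here builds a 64-character alphanumeric token with the rand crate. A standalone sketch of the same idea (illustrative only; on rand 0.8+ Alphanumeric yields u8, so the extra map(char::from) below is required, while the bare .take(64).collect() in the diff suggests the crate pins rand 0.7):

use rand::{distributions::Alphanumeric, thread_rng, Rng};

// Generate a 64-character alphanumeric id, mirroring new_id() above.
fn new_id() -> String {
    thread_rng()
        .sample_iter(&Alphanumeric)
        .take(64)
        .map(char::from) // needed on rand 0.8+, where Alphanumeric samples u8
        .collect()
}

fn main() {
    // Two consecutive ids should differ, matching the ids_are_random test
    // added in the next hunk.
    assert_ne!(new_id(), new_id());
}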
@@ -364,7 +385,37 @@ mod init {
 }
 
 #[cfg(test)]
-mod cache {
+mod data {
+    use super::*;
+    use setup::MTT;
+
+    #[async_std::test]
+    async fn ids_are_random() {
+        let mtt = MTT::new().await;
+        let id1 = mtt.db.new_id();
+        let id2 = mtt.db.new_id();
+        assert_ne!(id1, id2, "Ids should be random");
+    }
+
+    #[async_std::test]
+    async fn add_database() {
+        let mtt = MTT::new().await;
+        let name = "fred";
+        let id = "*gsdfg";
+        let output = mtt.db.add("database", name, id).await;
+        assert_eq!(output.session, [id], "should update session info.");
+        /*
+        assert_eq!(
+            mtt.db.list(["database"].to_vec()).await.unwrap(),
+            [name],
+            "Should list the databases."
+        );
+        */
+    }
+}
+
+#[cfg(test)]
+mod cache_test {
     use super::*;
     use async_std::fs::read;
     use setup::MTT;
@@ -724,13 +775,13 @@ mod enum_ctype {
 
     #[test]
     fn get_dbmap_type() {
-        let holder = CacheType::DBMap(Databases::new());
+        let holder = CacheType::DBMap(Store::new());
         assert_eq!(holder.entry_type(), "DBMap");
     }
 
     #[test]
     fn get_new_databases_bytes() {
-        let holder = CacheType::DBMap(Databases::new());
+        let holder = CacheType::DBMap(Store::new());
         let mut expected = "DBMap".as_bytes().to_vec();
         expected.push(0);
         let output = holder.to_bytes();
@@ -2,11 +2,11 @@ use super::{DBError, FileData, SessionData};
 use std::{collections::HashMap, slice, str};
 
 #[derive(Clone)]
-pub struct Databases {
+pub struct Store {
     db_map: HashMap<String, String>,
 }
 
-impl Databases {
+impl Store {
     pub fn new() -> Self {
         Self {
             db_map: HashMap::new(),
@@ -22,7 +22,7 @@ impl Databases {
     }
 }
 
-impl FileData<Self> for Databases {
+impl FileData<Self> for Store {
     fn to_bytes(&self) -> Vec<u8> {
         let mut output = Vec::new();
         for (name, id) in self.db_map.iter() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
||||||
let mut output = Databases::new();
|
let mut output = Store::new();
|
||||||
let mut name: Vec<u8> = Vec::new();
|
let mut name: Vec<u8> = Vec::new();
|
||||||
let mut id: Vec<u8> = Vec::new();
|
let mut id: Vec<u8> = Vec::new();
|
||||||
let mut get_id = false;
|
let mut get_id = false;
|
||||||
@ -80,7 +80,7 @@ impl FileData<Self> for Databases {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SessionData for Databases {
|
impl SessionData for Store {
|
||||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
||||||
match Self::test_key(key) {
|
match Self::test_key(key) {
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
@ -96,7 +96,7 @@ impl SessionData for Databases {
|
|||||||
Ok(output)
|
Ok(output)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
||||||
match Self::test_key(key) {
|
match Self::test_key(key) {
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
Err(err) => return Err(err),
|
Err(err) => return Err(err),
|
||||||
@ -129,7 +129,7 @@ mod file_data {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn to_bytes_new() {
|
fn to_bytes_new() {
|
||||||
let dbs = Databases::new();
|
let dbs = Store::new();
|
||||||
let expected: Vec<u8> = Vec::new();
|
let expected: Vec<u8> = Vec::new();
|
||||||
let output = dbs.to_bytes();
|
let output = dbs.to_bytes();
|
||||||
assert_eq!(output, expected);
|
assert_eq!(output, expected);
|
||||||
@@ -137,7 +137,7 @@ mod file_data {
 
     #[test]
     fn to_bytes_with_database() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let name = "something";
         let id = "id";
         dbs.add("database", name, id).unwrap();
@@ -152,13 +152,13 @@ mod file_data {
 
     #[test]
     fn from_bytes() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         dbs.add("database", "one", "1").unwrap();
         dbs.add("database", "two", "2").unwrap();
         dbs.add("database", "three", "3").unwrap();
         let data = dbs.to_bytes();
         let mut feed = data.iter();
-        let output = Databases::from_bytes(&mut feed).unwrap();
+        let output = Store::from_bytes(&mut feed).unwrap();
         assert_eq!(output.db_map, dbs.db_map);
     }
 
@@ -166,7 +166,7 @@ mod file_data {
     fn from_bytes_incomplete_name() {
         let data = "notName".as_bytes();
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => assert_eq!(err.to_string(), "file corruption"),
         }
@@ -178,7 +178,7 @@ mod file_data {
         data.push(0);
         data.append(&mut "nope".as_bytes().to_vec());
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => assert_eq!(err.to_string(), "file corruption"),
         }
@@ -196,7 +196,7 @@ mod file_data {
         data.append(&mut "second".as_bytes().to_vec());
         data.push(0);
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => {
                 assert_eq!(err.to_string(), "file corruption");
@@ -219,18 +219,18 @@ mod session_data {
 
     #[test]
     fn add_new() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let key = "database";
         let value = "marvin";
         let data = "123456";
         assert_eq!(dbs.add(key, value, data).unwrap(), [data]);
-        let output = dbs.get(key, value).unwrap();
+        let output = dbs.eq(key, value).unwrap();
         assert_eq!(output, [data]);
     }
 
     #[test]
     fn add_bad_key() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let key = "sdgfjksg";
         match dbs.add(key, "fred", "barney") {
             Ok(_) => assert!(false, "Bad keys should produce an error."),
@@ -240,9 +240,9 @@ mod session_data {
 
     #[test]
     fn get_bad_key() {
-        let dbs = Databases::new();
+        let dbs = Store::new();
         let key = "bvdfgert";
-        match dbs.get(key, "fred") {
+        match dbs.eq(key, "fred") {
             Ok(_) => assert!(false, "Bad keys should produce an error."),
             Err(_) => (),
         }
@@ -250,7 +250,7 @@ mod session_data {
 
     #[test]
     fn unique_names() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let value = "wilma";
         dbs.add("database", value, "something").unwrap();
         match dbs.add("database", value, "overwrite") {
@@ -264,14 +264,14 @@ mod session_data {
 
     #[test]
     fn get_missing() {
-        let dbs = Databases::new();
-        let output = dbs.get("database", "melvin").unwrap();
+        let dbs = Store::new();
+        let output = dbs.eq("database", "melvin").unwrap();
         assert_eq!(output, Vec::<String>::new());
     }
 
     #[test]
     fn list_bad_keys() {
-        let dbs = Databases::new();
+        let dbs = Store::new();
         let key = "sdfgren";
         let keys = [key];
         match dbs.list(keys.to_vec()) {
@@ -282,7 +282,7 @@ mod session_data {
 
     #[test]
     fn list_is_sorted() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let mut data = ["fred", "barney", "wilma", "betty", "pebbles", "bambam"];
         for db in data {
             dbs.add("database", db, db).unwrap();