Filesystem Sync (#142)

This commit is contained in:
Gregory Schier
2025-01-03 20:41:00 -08:00
committed by GitHub
parent 6ad27c4458
commit 31440eea76
159 changed files with 4296 additions and 1016 deletions

View File

@@ -0,0 +1,304 @@
/// Lexer state while scanning the JSON text.
enum FormatState {
    /// Inside a `${[ … ]}` template tag; contents are copied verbatim.
    TemplateTag,
    /// Inside a double-quoted JSON string; contents are copied verbatim.
    String,
    /// Default state: structural characters are significant.
    None,
}

/// Formats JSON that might contain template tags (skipped entirely).
///
/// This is a forgiving, purely lexical pretty-printer: it never fails on
/// invalid JSON, it just re-indents whatever structure it sees. `tab` is the
/// indentation unit, repeated once per nesting depth.
pub fn format_json(text: &str, tab: &str) -> String {
    let mut chars = text.chars().peekable();
    let mut new_json = String::new();
    let mut depth: usize = 0;
    let mut state = FormatState::None;
    loop {
        // Cheap clone of the iterator state so we can look ahead more than
        // one char (needed for the 2- and 3-char tag delimiters below).
        let rest_of_chars = chars.clone();
        let current_char = match chars.next() {
            None => break,
            Some(c) => c,
        };

        // Inside a JSON string: copy verbatim until the closing quote,
        // keeping escape pairs (e.g. `\"`) intact.
        if let FormatState::String = state {
            match current_char {
                '"' => {
                    state = FormatState::None;
                    new_json.push(current_char);
                }
                '\\' => {
                    new_json.push(current_char);
                    if let Some(c) = chars.next() {
                        new_json.push(c);
                    }
                }
                _ => new_json.push(current_char),
            }
            continue;
        }

        // Inside a template tag: copy verbatim until the closing `]}`.
        if let FormatState::TemplateTag = state {
            if rest_of_chars.take(2).collect::<String>() == "]}" {
                state = FormatState::None;
                new_json.push_str("]}");
                chars.next(); // Skip the second closing bracket
            } else {
                new_json.push(current_char);
            }
            continue;
        }

        // Template tag opener `${[`. Note `rest_of_chars` still includes
        // `current_char`, so this is a 3-char lookahead from it.
        if rest_of_chars.take(3).collect::<String>() == "${[" {
            state = FormatState::TemplateTag;
            new_json.push_str("${[");
            chars.next(); // Skip {
            chars.next(); // Skip [
            continue;
        }

        match current_char {
            ',' => {
                new_json.push(current_char);
                new_json.push('\n');
                // `tab.repeat(depth)` — the old `tab.to_string().repeat(…)`
                // allocated an extra String per call for nothing.
                new_json.push_str(&tab.repeat(depth));
            }
            '{' => match chars.peek() {
                // Keep empty objects on a single line
                Some('}') => {
                    new_json.push_str("{}");
                    chars.next(); // Skip }
                }
                _ => {
                    depth += 1;
                    new_json.push(current_char);
                    new_json.push('\n');
                    new_json.push_str(&tab.repeat(depth));
                }
            },
            '[' => match chars.peek() {
                // Keep empty arrays on a single line
                Some(']') => {
                    new_json.push_str("[]");
                    chars.next(); // Skip ]
                }
                _ => {
                    depth += 1;
                    new_json.push(current_char);
                    new_json.push('\n');
                    new_json.push_str(&tab.repeat(depth));
                }
            },
            '}' | ']' => {
                // Guard just in case invalid JSON has more closes than opens
                depth = depth.saturating_sub(1);
                new_json.push('\n');
                new_json.push_str(&tab.repeat(depth));
                new_json.push(current_char);
            }
            ':' => {
                new_json.push(current_char);
                new_json.push(' '); // Pad with space
            }
            '"' => {
                state = FormatState::String;
                new_json.push(current_char);
            }
            // Insignificant whitespace is dropped; everything else copied.
            ' ' | '\n' | '\t' | '\r' => {}
            _ => new_json.push(current_char),
        }
    }
    // Drop any lines that ended up whitespace-only.
    new_json
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect::<Vec<&str>>()
        .join("\n")
}
#[cfg(test)]
mod test {
    use crate::format::format_json;

    // NOTE(review): the expected-output fixtures below are raw strings whose
    // leading whitespace is significant to the assertion. The indentation
    // visible here may have been lost in transcription — confirm the fixture
    // bodies against `format_json`'s actual output before relying on them.

    // One key per line inside an object.
    #[test]
    fn test_simple_object() {
        assert_eq!(
            format_json(r#"{"foo":"bar","baz":"qux"}"#, " "),
            r#"
{
"foo": "bar",
"baz": "qux"
}
"#
            .trim()
        );
    }

    // Escaped quotes inside string values are preserved verbatim.
    #[test]
    fn test_escaped() {
        assert_eq!(
            format_json(r#"{"foo":"Hi \"world!\""}"#, " "),
            r#"
{
"foo": "Hi \"world!\""
}
"#
            .trim()
        );
    }

    // One element per line inside an array.
    #[test]
    fn test_simple_array() {
        assert_eq!(
            format_json(r#"["foo","bar","baz","qux"]"#, " "),
            r#"
[
"foo",
"bar",
"baz",
"qux"
]
"#
            .trim()
        );
    }

    // Pre-existing whitespace in the input is discarded.
    #[test]
    fn test_extra_whitespace() {
        assert_eq!(
            format_json(
                r#"["foo", "bar", "baz","qux"
]"#,
                " "
            ),
            r#"
[
"foo",
"bar",
"baz",
"qux"
]
"#
            .trim()
        );
    }

    // Invalid JSON is re-indented best-effort, never rejected.
    #[test]
    fn test_invalid_json() {
        assert_eq!(
            format_json(r#"["foo", {"bar", }"baz",["qux" ]]"#, " "),
            r#"
[
"foo",
{
"bar",
}"baz",
[
"qux"
]
]
"#
            .trim()
        );
    }

    // Template tag contents (including their spaces) pass through untouched.
    #[test]
    fn test_skip_template_tags() {
        assert_eq!(
            format_json(r#"{"foo":${[ fn("hello", "world") ]} }"#, " "),
            r#"
{
"foo": ${[ fn("hello", "world") ]}
}
"#
            .trim()
        );
    }

    // Larger real-world payload exercising nested objects and arrays.
    #[test]
    fn test_graphql_response() {
        assert_eq!(
            format_json(
                r#"{"data":{"capsules":[{"landings":null,"original_launch":null,"reuse_count":0,"status":"retired","type":"Dragon 1.0","missions":null},{"id":"5e9e2c5bf3591882af3b2665","landings":null,"original_launch":null,"reuse_count":0,"status":"retired","type":"Dragon 1.0","missions":null}]}}"#,
                " "
            ),
            r#"
{
"data": {
"capsules": [
{
"landings": null,
"original_launch": null,
"reuse_count": 0,
"status": "retired",
"type": "Dragon 1.0",
"missions": null
},
{
"id": "5e9e2c5bf3591882af3b2665",
"landings": null,
"original_launch": null,
"reuse_count": 0,
"status": "retired",
"type": "Dragon 1.0",
"missions": null
}
]
}
}
"#
            .trim()
        );
    }

    // Empty containers stay on one line.
    #[test]
    fn test_immediate_close() {
        assert_eq!(
            format_json(r#"{"bar":[]}"#, " "),
            r#"
{
"bar": []
}
"#
            .trim()
        );
    }

    // Extra closing braces must not underflow the depth counter.
    #[test]
    fn test_more_closes() {
        assert_eq!(
            format_json(r#"{}}"#, " "),
            r#"
{}
}
"#
            .trim()
        );
    }
}

View File

@@ -0,0 +1,6 @@
// Template tokenizer and AST types.
pub mod parser;
// Async rendering of parsed templates.
pub mod renderer;
// Template-tag-aware JSON pretty-printing.
pub mod format;

// Re-export parser and renderer APIs at the crate root.
// NOTE(review): `format` is not glob re-exported — confirm whether that is
// deliberate or an omission.
pub use parser::*;
pub use renderer::*;

View File

@@ -0,0 +1,776 @@
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use ts_rs::TS;
/// A parsed template: the ordered token stream produced by `Parser::parse`.
/// Exported to TypeScript via `ts-rs`.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize, TS)]
#[ts(export, export_to="parser.ts")]
pub struct Tokens {
    pub tokens: Vec<Token>,
}
impl Display for Tokens {
    /// Render the token stream back to its textual template form by
    /// concatenating each token's own display output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for token in &self.tokens {
            write!(f, "{}", token)?;
        }
        Ok(())
    }
}
/// A single named argument inside a template function call, e.g. the
/// `a=bar` in `${[ foo(a=bar) ]}`.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize, TS)]
#[ts(export, export_to="parser.ts")]
pub struct FnArg {
    pub name: String,
    pub value: Val,
}
impl Display for FnArg {
    /// Render the argument as `name=value`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}={}", self.name, self.value)
    }
}
/// A value appearing inside a template tag.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to="parser.ts")]
pub enum Val {
    /// Single-quoted string literal, e.g. `'hi'`.
    Str { text: String },
    /// Bare identifier, looked up in the variable map at render time.
    Var { name: String },
    /// `true` / `false` literal.
    Bool { value: bool },
    /// Function call with named arguments, e.g. `fn(a='x')`.
    Fn { name: String, args: Vec<FnArg> },
    /// `null` literal.
    Null,
}
impl Display for Val {
    /// Render a value back to its template-source form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let str = match self {
            // NOTE(review): `"\'"` is the same string as `"'"`, so this
            // replace is a no-op. The intent was probably `"\\'"` to escape
            // embedded quotes so Display output round-trips through
            // `parse_string` — confirm (the in-file `token_display_str`
            // test was written with the same escape and currently pins the
            // no-op behavior).
            Val::Str { text } => format!("'{}'", text.to_string().replace("'", "\'")),
            Val::Var { name } => name.to_string(),
            Val::Bool { value } => value.to_string(),
            Val::Fn { name, args } => {
                // Null-valued arguments are omitted from the rendered call.
                format!(
                    "{name}({})",
                    args.iter()
                        .filter_map(|a| match a.value.clone() {
                            Val::Null => None,
                            _ => Some(a.to_string()),
                        })
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            }
            Val::Null => "null".to_string(),
        };
        write!(f, "{}", str)
    }
}
/// One lexical unit of a template.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to="parser.ts")]
pub enum Token {
    /// Literal text outside any tag.
    Raw { text: String },
    /// A `${[ … ]}` tag and its parsed value.
    Tag { val: Val },
    /// End-of-input marker; always the final token of a parse.
    Eof,
}
impl Display for Token {
    /// Render a token back to template text: raw text verbatim, tags as
    /// `${[ … ]}`, and the EOF marker as the empty string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let str = match self {
            Token::Raw { text } => text.to_string(),
            // `Val` implements Display, so format it directly instead of
            // allocating an intermediate String with `to_string()`.
            Token::Tag { val } => format!("${{[ {} ]}}", val),
            Token::Eof => "".to_string(),
        };
        write!(f, "{}", str)
    }
}
// Template Syntax
//
// ${[ my_var ]}
// ${[ my_fn() ]}
// ${[ my_fn(my_var) ]}
// ${[ my_fn(my_var, "A String") ]}
// default
/// Hand-rolled recursive-descent parser for the template syntax.
#[derive(Default)]
pub struct Parser {
    tokens: Vec<Token>,   // completed tokens, in input order
    chars: Vec<char>,     // input as chars, for O(1) indexed access
    pos: usize,           // current index into `chars`
    curr_text: String,    // raw text accumulated since the last token
}
impl Parser {
    /// Build a parser over `text`; all other state starts at its default.
    pub fn new(text: &str) -> Parser {
        Parser {
            chars: text.chars().collect(),
            ..Parser::default()
        }
    }

    /// Tokenize the entire input. Malformed tags are emitted verbatim as raw
    /// text, and the stream is always terminated with `Token::Eof`.
    pub fn parse(&mut self) -> Tokens {
        while self.pos < self.chars.len() {
            // Capture the position at the top of every iteration. The old
            // guard compared against the position before the whole loop, so
            // it could only ever fire on the first iteration.
            let iter_start = self.pos;
            if self.match_str("${[") {
                let start_curr = self.pos;
                if let Some(t) = self.parse_tag() {
                    self.push_token(t);
                } else {
                    // Invalid tag: keep the `${[` opener as literal text and
                    // resume scanning right after it.
                    self.pos = start_curr;
                    self.curr_text += "${[";
                }
            } else {
                let ch = self.next_char();
                self.curr_text.push(ch);
            }
            if iter_start == self.pos {
                panic!("Parser stuck!");
            }
        }
        self.push_token(Token::Eof);
        Tokens {
            tokens: self.tokens.clone(),
        }
    }

    /// Parse the interior of a tag (cursor just past `${[`) through the
    /// closing `]}`. Returns `None` on malformed input; the caller restores
    /// the position.
    fn parse_tag(&mut self) -> Option<Token> {
        // Parse up to first identifier
        // ${[ my_var...
        self.skip_whitespace();
        let val = self.parse_value()?;
        // Parse to closing tag
        // ${[ my_var(a, b, c) ]}
        self.skip_whitespace();
        if !self.match_str("]}") {
            return None;
        }
        Some(Token::Tag { val })
    }

    /// Debug helper: print the current position, input, and tokens.
    /// Note: indexes `chars[pos]`, so it panics if called at end of input.
    #[allow(dead_code)]
    fn debug_pos(&self, x: &str) {
        println!(
            r#"Position: {x}: text[{}]='{}' → "{}" → {:?}"#,
            self.pos,
            self.chars[self.pos],
            self.chars.iter().collect::<String>(),
            self.tokens,
        );
    }

    /// Parse a single value: function call, keyword (`null`/`true`/`false`),
    /// bare variable name, or single-quoted string — tried in that order.
    fn parse_value(&mut self) -> Option<Val> {
        if let Some((name, args)) = self.parse_fn() {
            Some(Val::Fn { name, args })
        } else if let Some(v) = self.parse_ident() {
            match v.as_str() {
                "null" => Some(Val::Null),
                "true" => Some(Val::Bool { value: true }),
                "false" => Some(Val::Bool { value: false }),
                _ => Some(Val::Var { name: v }),
            }
        } else if let Some(v) = self.parse_string() {
            Some(Val::Str { text: v })
        } else {
            None
        }
    }

    /// Parse `name(args…)`. Restores `pos` and returns `None` on failure.
    fn parse_fn(&mut self) -> Option<(String, Vec<FnArg>)> {
        let start_pos = self.pos;
        let name = match self.parse_fn_name() {
            Some(v) => v,
            None => {
                self.pos = start_pos;
                return None;
            }
        };
        let args = match self.parse_fn_args() {
            Some(args) => args,
            None => {
                self.pos = start_pos;
                return None;
            }
        };
        Some((name, args))
    }

    /// Parse a parenthesized, comma-separated list of `name=value` args.
    /// Restores `pos` and returns `None` on failure.
    /// NOTE(review): a missing `=` between name and value is tolerated
    /// (the `match_str("=")` result is ignored) — confirm that's intended.
    fn parse_fn_args(&mut self) -> Option<Vec<FnArg>> {
        if !self.match_str("(") {
            return None;
        }
        let start_pos = self.pos;
        let mut args: Vec<FnArg> = Vec::new();
        // Fn closed immediately
        self.skip_whitespace();
        if self.match_str(")") {
            return Some(args);
        }
        while self.pos < self.chars.len() {
            // Per-iteration progress marker (the old guard compared against
            // the pre-loop position and was dead after the first iteration).
            let iter_start = self.pos;
            self.skip_whitespace();
            let name = self.parse_ident();
            self.skip_whitespace();
            self.match_str("=");
            self.skip_whitespace();
            let value = self.parse_value();
            self.skip_whitespace();
            if let (Some(name), Some(value)) = (name, value) {
                args.push(FnArg { name, value });
            } else {
                // Didn't find valid thing, so return
                self.pos = start_pos;
                return None;
            }
            if self.match_str(")") {
                break;
            }
            self.skip_whitespace();
            // If we don't find a comma, that's bad
            if !args.is_empty() && !self.match_str(",") {
                self.pos = start_pos;
                return None;
            }
            if iter_start == self.pos {
                panic!("Parser stuck!");
            }
        }
        Some(args)
    }

    /// Parse an identifier: one or more alphanumeric or `_` chars.
    fn parse_ident(&mut self) -> Option<String> {
        let start_pos = self.pos;
        let mut text = String::new();
        while self.pos < self.chars.len() {
            let ch = self.peek_char();
            if ch.is_alphanumeric() || ch == '_' {
                text.push(ch);
                self.pos += 1;
            } else {
                break;
            }
        }
        if text.is_empty() {
            self.pos = start_pos;
            return None;
        }
        Some(text)
    }

    /// Parse a function name; like an identifier but `.` is also allowed
    /// (e.g. `foo.bar.baz`).
    fn parse_fn_name(&mut self) -> Option<String> {
        let start_pos = self.pos;
        let mut text = String::new();
        while self.pos < self.chars.len() {
            let ch = self.peek_char();
            if ch.is_alphanumeric() || ch == '_' || ch == '.' {
                text.push(ch);
                self.pos += 1;
            } else {
                break;
            }
        }
        if text.is_empty() {
            self.pos = start_pos;
            return None;
        }
        Some(text)
    }

    /// Parse a single-quoted string with backslash escapes (`\'` → `'`).
    /// Restores `pos` and returns `None` if no closing quote is found.
    fn parse_string(&mut self) -> Option<String> {
        let start_pos = self.pos;
        let mut text = String::new();
        if !self.match_str("'") {
            return None;
        }
        let mut found_closing = false;
        while self.pos < self.chars.len() {
            let ch = self.next_char();
            match ch {
                '\\' => {
                    // A trailing backslash at end of input has nothing to
                    // escape; the old code indexed past the end of `chars`
                    // here and panicked (e.g. on input `${[ 'a\`).
                    if self.pos < self.chars.len() {
                        text.push(self.next_char());
                    }
                }
                '\'' => {
                    found_closing = true;
                    break;
                }
                _ => {
                    text.push(ch);
                }
            }
        }
        if !found_closing {
            self.pos = start_pos;
            return None;
        }
        Some(text)
    }

    /// Advance past any run of whitespace characters.
    fn skip_whitespace(&mut self) {
        while self.pos < self.chars.len() && self.peek_char().is_whitespace() {
            self.pos += 1;
        }
    }

    /// Consume and return the current char. Panics if `pos` is out of
    /// bounds — callers must check `pos < chars.len()` first.
    fn next_char(&mut self) -> char {
        let ch = self.peek_char();
        self.pos += 1;
        ch
    }

    /// Return the current char without consuming it. Panics if `pos` is out
    /// of bounds.
    fn peek_char(&self) -> char {
        self.chars[self.pos]
    }

    /// Flush any accumulated raw text as a `Token::Raw`, then push `token`.
    fn push_token(&mut self, token: Token) {
        if !self.curr_text.is_empty() {
            let text_token = Token::Raw {
                text: self.curr_text.clone(),
            };
            self.tokens.push(text_token);
            self.curr_text.clear();
        }
        self.tokens.push(token);
    }

    /// If the input at `pos` starts with `value`, consume it and return
    /// `true`. Compares char-by-char: the old version measured the needle in
    /// *bytes* while indexing by *chars*, which is wrong for any non-ASCII
    /// needle (all current callers pass ASCII, so behavior is unchanged).
    fn match_str(&mut self, value: &str) -> bool {
        let needle: Vec<char> = value.chars().collect();
        if self.pos + needle.len() > self.chars.len() {
            return false;
        }
        if self.chars[self.pos..self.pos + needle.len()] == needle[..] {
            // We have a match, so advance the current index
            self.pos += needle.len();
            true
        } else {
            false
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::Val::Null;
    use crate::*;

    // Bare variable tag.
    #[test]
    fn var_simple() {
        let mut p = Parser::new("${[ foo ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Var { name: "foo".into() }
                },
                Token::Eof
            ]
        );
    }

    // `true` / `false` parse as Bool, not as Var.
    #[test]
    fn var_boolean() {
        let mut p = Parser::new("${[ true ]}${[ false ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Bool { value: true },
                },
                Token::Tag {
                    val: Val::Bool { value: false },
                },
                Token::Eof
            ]
        );
    }

    // Two identifiers inside one tag is invalid; the tag stays raw text.
    #[test]
    fn var_multiple_names_invalid() {
        let mut p = Parser::new("${[ foo bar ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Raw {
                    text: "${[ foo bar ]}".into()
                },
                Token::Eof
            ]
        );
    }

    // `\'` escapes are unescaped in the parsed string value.
    #[test]
    fn tag_string() {
        let mut p = Parser::new(r#"${[ 'foo \'bar\' baz' ]}"#);
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Str {
                        text: r#"foo 'bar' baz"#.into()
                    }
                },
                Token::Eof
            ]
        );
    }

    // Raw text on both sides of a tag is preserved.
    #[test]
    fn var_surrounded() {
        let mut p = Parser::new("Hello ${[ foo ]}!");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Raw {
                    text: "Hello ".to_string()
                },
                Token::Tag {
                    val: Val::Var { name: "foo".into() }
                },
                Token::Raw {
                    text: "!".to_string()
                },
                Token::Eof,
            ]
        );
    }

    // Zero-argument function call.
    #[test]
    fn fn_simple() {
        let mut p = Parser::new("${[ foo() ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo".into(),
                        args: Vec::new(),
                    }
                },
                Token::Eof
            ]
        );
    }

    // Function names may contain dots.
    #[test]
    fn fn_dot_name() {
        let mut p = Parser::new("${[ foo.bar.baz() ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo.bar.baz".into(),
                        args: Vec::new(),
                    }
                },
                Token::Eof
            ]
        );
    }

    // Single named identifier argument.
    #[test]
    fn fn_ident_arg() {
        let mut p = Parser::new("${[ foo(a=bar) ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo".into(),
                        args: vec![FnArg {
                            name: "a".into(),
                            value: Val::Var { name: "bar".into() }
                        }],
                    }
                },
                Token::Eof
            ]
        );
    }

    // Whitespace around `=` and `,` is tolerated.
    #[test]
    fn fn_ident_args() {
        let mut p = Parser::new("${[ foo(a=bar,b = baz, c =qux ) ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo".into(),
                        args: vec![
                            FnArg {
                                name: "a".into(),
                                value: Val::Var { name: "bar".into() }
                            },
                            FnArg {
                                name: "b".into(),
                                value: Val::Var { name: "baz".into() }
                            },
                            FnArg {
                                name: "c".into(),
                                value: Val::Var { name: "qux".into() }
                            },
                        ],
                    }
                },
                Token::Eof
            ]
        );
    }

    // Identifier, string, and boolean argument values mixed in one call.
    #[test]
    fn fn_mixed_args() {
        let mut p = Parser::new(r#"${[ foo(aaa=bar,bb='baz \'hi\'', c=qux, z=true ) ]}"#);
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo".into(),
                        args: vec![
                            FnArg {
                                name: "aaa".into(),
                                value: Val::Var { name: "bar".into() }
                            },
                            FnArg {
                                name: "bb".into(),
                                value: Val::Str {
                                    text: r#"baz 'hi'"#.into()
                                }
                            },
                            FnArg {
                                name: "c".into(),
                                value: Val::Var { name: "qux".into() }
                            },
                            FnArg {
                                name: "z".into(),
                                value: Val::Bool { value: true }
                            },
                        ],
                    }
                },
                Token::Eof
            ]
        );
    }

    // Function calls may be nested as argument values.
    #[test]
    fn fn_nested() {
        let mut p = Parser::new("${[ foo(b=bar()) ]}");
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "foo".into(),
                        args: vec![FnArg {
                            name: "b".into(),
                            value: Val::Fn {
                                name: "bar".into(),
                                args: vec![],
                            }
                        }],
                    }
                },
                Token::Eof
            ]
        );
    }

    // Nested calls with their own argument lists.
    #[test]
    fn fn_nested_args() {
        let mut p = Parser::new(r#"${[ outer(a=inner(a=foo, b='i'), c='o') ]}"#);
        assert_eq!(
            p.parse().tokens,
            vec![
                Token::Tag {
                    val: Val::Fn {
                        name: "outer".into(),
                        args: vec![
                            FnArg {
                                name: "a".into(),
                                value: Val::Fn {
                                    name: "inner".into(),
                                    args: vec![
                                        FnArg {
                                            name: "a".into(),
                                            value: Val::Var { name: "foo".into() }
                                        },
                                        FnArg {
                                            name: "b".into(),
                                            value: Val::Str { text: "i".into() },
                                        },
                                    ],
                                }
                            },
                            FnArg {
                                name: "c".into(),
                                value: Val::Str { text: "o".into() }
                            },
                        ],
                    }
                },
                Token::Eof
            ]
        );
    }

    // Display renders values back to template-source form.
    #[test]
    fn token_display_var() {
        assert_eq!(
            Val::Var {
                name: "foo".to_string()
            }
            .to_string(),
            "foo"
        );
    }

    // NOTE(review): `"\'"` in a normal Rust string is just `'`, so this
    // expectation pins the current (unescaped) Display output for strings
    // containing quotes — see the matching note on `impl Display for Val`.
    #[test]
    fn token_display_str() {
        assert_eq!(
            Val::Str {
                text: "Hello 'You'".to_string()
            }
            .to_string(),
            "'Hello \'You\''"
        );
    }

    // Null-valued args are omitted from Display output.
    #[test]
    fn token_null_fn_arg() {
        assert_eq!(
            Val::Fn {
                name: "fn".to_string(),
                args: vec![
                    FnArg {
                        name: "n".to_string(),
                        value: Null,
                    },
                    FnArg {
                        name: "a".to_string(),
                        value: Val::Str {
                            text: "aaa".to_string()
                        }
                    }
                ]
            }
            .to_string(),
            r#"fn(a='aaa')"#
        );
    }

    // Full tag Display: `${[ … ]}` wrapper with comma-separated args.
    #[test]
    fn token_display_fn() {
        assert_eq!(
            Token::Tag {
                val: Val::Fn {
                    name: "foo".to_string(),
                    args: vec![
                        FnArg {
                            name: "arg".to_string(),
                            value: Val::Str {
                                text: "v".to_string()
                            }
                        },
                        FnArg {
                            name: "arg2".to_string(),
                            value: Val::Var {
                                name: "my_var".to_string()
                            }
                        }
                    ]
                }
            }
            .to_string(),
            r#"${[ foo(arg='v', arg2=my_var) ]}"#
        );
    }

    // A whole token stream round-trips via Display concatenation.
    #[test]
    fn tokens_display() {
        assert_eq!(
            Tokens {
                tokens: vec![
                    Token::Tag {
                        val: Val::Var {
                            name: "my_var".to_string()
                        }
                    },
                    Token::Raw {
                        text: " Some cool text ".to_string(),
                    },
                    Token::Tag {
                        val: Val::Str {
                            text: "Hello World".to_string()
                        }
                    }
                ]
            }
            .to_string(),
            r#"${[ my_var ]} Some cool text ${[ 'Hello World' ]}"#
        );
    }
}

View File

@@ -0,0 +1,249 @@
use crate::{FnArg, Parser, Token, Tokens, Val};
use log::warn;
use std::collections::HashMap;
use std::future::Future;
/// Callback used to evaluate template function calls (e.g. `fn(a='x')`)
/// during rendering. `run` receives the function name and its resolved,
/// stringified arguments and returns the replacement text, or an error
/// string (which the renderer logs and renders as empty).
pub trait TemplateCallback {
    fn run(
        &self,
        fn_name: &str,
        args: HashMap<String, String>,
    ) -> impl Future<Output = Result<String, String>> + Send;
}
/// Parse `template` and immediately render it against `vars`, using `cb` to
/// evaluate any function tags.
pub async fn parse_and_render<T: TemplateCallback>(
    template: &str,
    vars: &HashMap<String, String>,
    cb: &T,
) -> String {
    let tokens = Parser::new(template).parse();
    render(tokens, vars, cb).await
}
/// Render a parsed token stream: raw text passes through unchanged, tags are
/// evaluated via `render_tag`, and the EOF marker contributes nothing.
pub async fn render<T: TemplateCallback>(
    tokens: Tokens,
    vars: &HashMap<String, String>,
    cb: &T,
) -> String {
    let mut doc_str: Vec<String> = Vec::new();
    for t in tokens.tokens {
        match t {
            Token::Raw { text } => doc_str.push(text),
            // `vars` is already a reference; the old `&vars` created a
            // needless `&&HashMap` that only compiled via auto-deref.
            Token::Tag { val } => doc_str.push(render_tag(val, vars, cb).await),
            Token::Eof => {}
        }
    }
    doc_str.join("")
}
/// Evaluate a single tag value to its rendered string.
async fn render_tag<T: TemplateCallback>(
    val: Val,
    vars: &HashMap<String, String>,
    cb: &T,
) -> String {
    match val {
        Val::Str { text } => text.into(),
        // Variable values may themselves contain template tags, so they are
        // recursively parsed and rendered. `Box::pin` is required because
        // async recursion needs a sized, heap-pinned future.
        Val::Var { name } => match vars.get(name.as_str()) {
            Some(v) => {
                let r = Box::pin(parse_and_render(v, vars, cb)).await;
                r.to_string()
            }
            // Unknown variables render as the empty string.
            None => "".into(),
        },
        Val::Bool { value } => value.to_string(),
        Val::Fn { name, args } => {
            let empty = "".to_string();
            // Resolve every argument to a plain string before handing the
            // call to the user-supplied callback.
            let mut resolved_args: HashMap<String, String> = HashMap::new();
            for a in args {
                let (k, v) = match a {
                    // String args pass through as-is.
                    FnArg {
                        name,
                        value: Val::Str { text },
                    } => (name.to_string(), text.to_string()),
                    // NOTE(review): unlike the Val::Var arm above, variable
                    // args are looked up directly and NOT recursively
                    // rendered — confirm this asymmetry is intended.
                    FnArg {
                        name,
                        value: Val::Var { name: var_name },
                    } => (
                        name.to_string(),
                        vars.get(var_name.as_str()).unwrap_or(&empty).to_string(),
                    ),
                    // Everything else (bools, nested fns, null) renders
                    // recursively through render_tag.
                    FnArg { name, value: val } => {
                        let r = Box::pin(render_tag(val.clone(), vars, cb)).await;
                        (name.to_string(), r)
                    }
                };
                resolved_args.insert(k, v);
            }
            // Callback errors are logged and rendered as the empty string
            // rather than propagated to the caller.
            match cb.run(name.as_str(), resolved_args.clone()).await {
                Ok(s) => s,
                Err(e) => {
                    warn!(
                        "Failed to run template callback {}({:?}): {}",
                        name, resolved_args, e
                    );
                    "".to_string()
                }
            }
        }
        Val::Null => "".into(),
    }
}
#[cfg(test)]
mod tests {
    use crate::renderer::TemplateCallback;
    use crate::*;
    use std::collections::HashMap;

    // Callback stub for tests that never reach a function tag.
    struct EmptyCB {}

    impl TemplateCallback for EmptyCB {
        async fn run(
            &self,
            _fn_name: &str,
            _args: HashMap<String, String>,
        ) -> Result<String, String> {
            todo!()
        }
    }

    #[tokio::test]
    async fn render_empty() {
        let empty_cb = EmptyCB {};
        let template = "";
        let vars = HashMap::new();
        let result = "";
        assert_eq!(
            parse_and_render(template, &vars, &empty_cb).await,
            result.to_string()
        );
    }

    #[tokio::test]
    async fn render_text_only() {
        let empty_cb = EmptyCB {};
        let template = "Hello World!";
        let vars = HashMap::new();
        let result = "Hello World!";
        assert_eq!(
            parse_and_render(template, &vars, &empty_cb).await,
            result.to_string()
        );
    }

    // Variable tags substitute from the vars map.
    #[tokio::test]
    async fn render_simple() {
        let empty_cb = EmptyCB {};
        let template = "${[ foo ]}";
        let vars = HashMap::from([("foo".to_string(), "bar".to_string())]);
        let result = "bar";
        assert_eq!(
            parse_and_render(template, &vars, &empty_cb).await,
            result.to_string()
        );
    }

    // Variable values may themselves contain tags; rendering recurses.
    #[tokio::test]
    async fn render_recursive_var() {
        let empty_cb = EmptyCB {};
        let template = "${[ foo ]}";
        let mut vars = HashMap::new();
        vars.insert("foo".to_string(), "foo: ${[ bar ]}".to_string());
        vars.insert("bar".to_string(), "bar: ${[ baz ]}".to_string());
        vars.insert("baz".to_string(), "baz".to_string());
        let result = "foo: bar: baz";
        assert_eq!(
            parse_and_render(template, &vars, &empty_cb).await,
            result.to_string()
        );
    }

    #[tokio::test]
    async fn render_surrounded() {
        let empty_cb = EmptyCB {};
        let template = "hello ${[ word ]} world!";
        let vars = HashMap::from([("word".to_string(), "cruel".to_string())]);
        let result = "hello cruel world!";
        assert_eq!(
            parse_and_render(template, &vars, &empty_cb).await,
            result.to_string()
        );
    }

    // Function tags are dispatched to the callback with resolved args.
    #[tokio::test]
    async fn render_valid_fn() {
        let vars = HashMap::new();
        let template = r#"${[ say_hello(a='John', b='Kate') ]}"#;
        let result = r#"say_hello: 2, Some("John") Some("Kate")"#;
        struct CB {}
        impl TemplateCallback for CB {
            async fn run(
                &self,
                fn_name: &str,
                args: HashMap<String, String>,
            ) -> Result<String, String> {
                Ok(format!(
                    "{fn_name}: {}, {:?} {:?}",
                    args.len(),
                    args.get("a"),
                    args.get("b")
                ))
            }
        }
        assert_eq!(parse_and_render(template, &vars, &CB {}).await, result);
    }

    // Nested function calls resolve inner-first.
    #[tokio::test]
    async fn render_nested_fn() {
        let vars = HashMap::new();
        let template = r#"${[ upper(foo=secret()) ]}"#;
        let result = r#"ABC"#;
        struct CB {}
        impl TemplateCallback for CB {
            async fn run(
                &self,
                fn_name: &str,
                args: HashMap<String, String>,
            ) -> Result<String, String> {
                Ok(match fn_name {
                    "secret" => "abc".to_string(),
                    "upper" => args["foo"].to_string().to_uppercase(),
                    _ => "".to_string(),
                })
            }
        }
        assert_eq!(
            parse_and_render(template, &vars, &CB {}).await,
            result.to_string()
        );
    }

    // Callback errors render as the empty string rather than failing.
    #[tokio::test]
    async fn render_fn_err() {
        let vars = HashMap::new();
        let template = r#"${[ error() ]}"#;
        let result = r#""#;
        struct CB {}
        impl TemplateCallback for CB {
            async fn run(
                &self,
                _fn_name: &str,
                _args: HashMap<String, String>,
            ) -> Result<String, String> {
                Err("Failed to do it!".to_string())
            }
        }
        assert_eq!(
            parse_and_render(template, &vars, &CB {}).await,
            result.to_string()
        );
    }
}