Templating (#43)

Add new `templating` crate with custom parser/renderer for dealing with
variables
Gregory Schier (committed by GitHub)
2024-06-07 08:39:12 -07:00
parent 558b429807, commit 372588f541
13 changed files with 572 additions and 31 deletions
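
For context, here is a minimal usage sketch (not part of this diff) of the API the new templates crate exposes, based on the parse_and_render signature and the template syntax shown in the files below:

use std::collections::HashMap;
use templates::parse_and_render;

fn main() {
    // `${[ name ]}` is the variable syntax handled by the new parser.
    let vars = HashMap::from([("name", "World")]);
    let out = parse_and_render("Hello ${[ name ]}!", vars, None);
    assert_eq!(out, "Hello World!");
}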

.github/workflows/ci-js.yml (new file, +16)

@@ -0,0 +1,16 @@
on: [push, pull_request]
name: CI (JS)
jobs:
  test:
    name: Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: lts/*
      - run: npm ci
      - run: npm run lint
      - run: npm test

.github/workflows/ci-rust.yml (new file, +30)

@@ -0,0 +1,30 @@
on:
  push:
    paths:
      - src-tauri/**
name: CI (Rust)
defaults:
  run:
    working-directory: src-tauri
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          sparse-checkout: 'src-tauri'
          sparse-checkout-cone-mode: false
      - run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - run: cargo check
      - run: cargo test --all

src-tauri/Cargo.lock (generated, +5)

@@ -6225,6 +6225,10 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "templates"
version = "0.1.0"
[[package]]
name = "tendril"
version = "0.4.3"
@@ -7687,6 +7691,7 @@ dependencies = [
"tauri-plugin-shell",
"tauri-plugin-updater",
"tauri-plugin-window-state",
"templates",
"tokio",
"tokio-stream",
"uuid",


@@ -1,4 +1,4 @@
workspace = { members = ["grpc"] }
workspace = { members = ["grpc", "templates"] }
[package]
name = "yaak-app"
@@ -24,6 +24,8 @@ cocoa = "0.25.0"
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work
[dependencies]
grpc = { path = "./grpc" }
templates = { path = "./templates" }
base64 = "0.22.0"
boa_engine = { version = "0.18.0", features = ["annex-b"] }
boa_runtime = { version = "0.18.0" }
@@ -49,7 +51,6 @@ uuid = "1.7.0"
log = "0.4.21"
datetime = "0.5.2"
reqwest_cookie_store = "0.6.0"
grpc = { path = "./grpc" }
tokio-stream = "0.1.15"
regex = "1.10.2"
hex_color = "3.0.0"


@@ -1,6 +1,6 @@
use std::fmt::Display;
use log::{debug, info, warn};
use log::{debug, info};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::types::JsonValue;


@@ -106,7 +106,7 @@ pub async fn send_http_request(
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
window,
)
.await;
.await;
}
};


@@ -755,8 +755,8 @@ async fn cmd_import_data(
"importer-yaak",
"importer-curl",
];
let file = read_to_string(file_path)
.unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
let file =
read_to_string(file_path).unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
let file_contents = file.as_str();
for plugin_name in plugins {
let v = run_plugin_import(&w.app_handle(), plugin_name, file_contents)


@@ -1,9 +1,11 @@
use std::collections::HashMap;
use regex::Regex;
use sqlx::types::{Json, JsonValue};
use crate::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter, Workspace};
use crate::models::{
Environment, EnvironmentVariable, HttpRequest, HttpRequestHeader, HttpUrlParameter, Workspace,
};
use templates::parse_and_render;
pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -> HttpRequest {
let r = r.clone();
@@ -64,30 +66,29 @@ pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -
}
pub fn render(template: &str, workspace: &Workspace, environment: Option<&Environment>) -> String {
let mut map = HashMap::new();
let workspace_variables = &workspace.variables.0;
for variable in workspace_variables {
let mut variables = HashMap::new();
variables = add_variable_to_map(variables, &workspace.variables.0);
if let Some(e) = environment {
variables = add_variable_to_map(variables, &e.variables.0);
}
parse_and_render(template, variables, None)
}
fn add_variable_to_map<'a>(
m: HashMap<&'a str, &'a str>,
variables: &'a Vec<EnvironmentVariable>,
) -> HashMap<&'a str, &'a str> {
let mut map = m.clone();
for variable in variables {
if !variable.enabled || variable.value.is_empty() {
continue;
}
map.insert(variable.name.as_str(), variable.value.as_str());
let name = variable.name.as_str();
let value = variable.value.as_str();
map.insert(name, value);
}
if let Some(e) = environment {
let environment_variables = &e.variables.0;
for variable in environment_variables {
if !variable.enabled || variable.value.is_empty() {
continue;
}
map.insert(variable.name.as_str(), variable.value.as_str());
}
}
Regex::new(r"\$\{\[\s*([^]\s]+)\s*]}")
.expect("Failed to create regex")
.replace_all(template, |caps: &regex::Captures| {
let key = caps.get(1).unwrap().as_str();
map.get(key).unwrap_or(&"")
})
.to_string()
map
}
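
The refactored render() above builds a single variables map by adding workspace variables first and environment variables second, so an environment value overrides a workspace value of the same name, and disabled or empty variables are skipped. A small sketch of that override behavior (illustrative only, using plain HashMaps and made-up names rather than the app's models):

use std::collections::HashMap;

fn main() {
    let mut variables: HashMap<&str, &str> = HashMap::new();
    variables.insert("base_url", "https://workspace.example.com");   // from the workspace
    variables.insert("base_url", "https://environment.example.com"); // environment wins on collision
    assert_eq!(variables["base_url"], "https://environment.example.com");
}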


@@ -134,8 +134,11 @@ pub fn app_menu(app_handle: &AppHandle) -> tauri::Result<Menu<Wry>> {
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.generate_theme_css".to_string(), "Generate Theme CSS")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.generate_theme_css".to_string(),
"Generate Theme CSS",
)
.build(app_handle)?,
],
)?,
],


@@ -0,0 +1,6 @@
[package]
name = "templates"
version = "0.1.0"
edition = "2021"
[dependencies]


@@ -0,0 +1,7 @@
pub mod parser;
pub mod renderer;
pub use parser::*;
pub use renderer::*;
pub fn template_foo() {}


@@ -0,0 +1,370 @@
#[derive(Clone, PartialEq, Debug)]
pub enum Val {
Str(String),
Ident(String),
}
#[derive(Clone, PartialEq, Debug)]
pub enum Token {
Raw(String),
Var { name: String },
Fn { name: String, args: Vec<Val> },
Eof,
}
// Template Syntax
//
// ${[ my_var ]}
// ${[ my_fn() ]}
// ${[ my_fn(my_var) ]}
// ${[ my_fn(my_var, "A String") ]}
// default
#[derive(Default)]
pub struct Parser {
tokens: Vec<Token>,
chars: Vec<char>,
pos: usize,
curr_text: String,
}
impl Parser {
pub fn new(text: &str) -> Parser {
Parser {
chars: text.chars().collect(),
..Parser::default()
}
}
pub fn parse(&mut self) -> Vec<Token> {
let start_pos = self.pos;
while self.pos < self.chars.len() {
if self.match_str("${[") {
let start_curr = self.pos;
if let Some(t) = self.parse_tag() {
self.push_token(t);
} else {
self.pos = start_curr;
self.curr_text += "${[";
}
} else {
let ch = self.next_char();
self.curr_text.push(ch);
}
if start_pos == self.pos {
panic!("Parser stuck!");
}
}
self.push_token(Token::Eof);
self.tokens.clone()
}
fn parse_tag(&mut self) -> Option<Token> {
// Parse up to first identifier
// ${[ my_var...
self.skip_whitespace();
let name = match self.parse_ident() {
None => return None,
Some(v) => v,
};
// Parse fn args if they exist
// ${[ my_var(a, b, c)
let args = if self.match_str("(") {
self.parse_fn_args()
} else {
None
};
// Parse to closing tag
// ${[ my_var(a, b, c) ]}
self.skip_whitespace();
if !self.match_str("]}") {
return None;
}
Some(match args {
Some(a) => Token::Fn { args: a, name },
None => Token::Var { name },
})
}
#[allow(dead_code)]
fn debug_pos(&self, x: &str) {
println!(
r#"Position: {x} -- [{}] = {} --> "{}"#,
self.pos,
self.chars[self.pos],
self.chars.iter().collect::<String>()
);
}
fn parse_fn_args(&mut self) -> Option<Vec<Val>> {
let start_pos = self.pos;
let mut args: Vec<Val> = Vec::new();
while self.pos < self.chars.len() {
self.skip_whitespace();
if let Some(v) = self.parse_ident_or_string() {
args.push(v);
}
self.skip_whitespace();
if self.match_str(")") {
break;
}
self.skip_whitespace();
// If we don't find a comma, that's bad
if !args.is_empty() && !self.match_str(",") {
return None;
}
if start_pos == self.pos {
panic!("Parser stuck!");
}
}
return Some(args);
}
fn parse_ident_or_string(&mut self) -> Option<Val> {
if let Some(i) = self.parse_ident() {
Some(Val::Ident(i))
} else if let Some(s) = self.parse_string() {
Some(Val::Str(s))
} else {
None
}
}
fn parse_ident(&mut self) -> Option<String> {
let start_pos = self.pos;
let mut text = String::new();
while self.pos < self.chars.len() {
let ch = self.peek_char();
if ch.is_alphanumeric() || ch == '_' {
text.push(ch);
self.pos += 1;
} else {
break;
}
if start_pos == self.pos {
panic!("Parser stuck!");
}
}
if text.is_empty() {
return None;
}
return Some(text);
}
fn parse_string(&mut self) -> Option<String> {
let start_pos = self.pos;
let mut text = String::new();
if !self.match_str("\"") {
return None;
}
let mut found_closing = false;
while self.pos < self.chars.len() {
let ch = self.next_char();
match ch {
'\\' => {
text.push(self.next_char());
}
'"' => {
found_closing = true;
break;
}
_ => {
text.push(ch);
}
}
if start_pos == self.pos {
panic!("Parser stuck!");
}
}
if !found_closing {
self.pos = start_pos;
return None;
}
return Some(text);
}
fn skip_whitespace(&mut self) {
while self.pos < self.chars.len() {
if self.peek_char().is_whitespace() {
self.pos += 1;
} else {
break;
}
}
}
fn next_char(&mut self) -> char {
let ch = self.peek_char();
self.pos += 1;
ch
}
fn peek_char(&self) -> char {
let ch = self.chars[self.pos];
ch
}
fn push_token(&mut self, token: Token) {
// Push any text we've accumulated
if !self.curr_text.is_empty() {
let text_token = Token::Raw(self.curr_text.clone());
self.tokens.push(text_token);
self.curr_text.clear();
}
self.tokens.push(token);
}
fn match_str(&mut self, value: &str) -> bool {
if self.pos + value.len() > self.chars.len() {
return false;
}
let cmp = self.chars[self.pos..self.pos + value.len()]
.iter()
.collect::<String>();
if cmp == value {
// We have a match, so advance the current index
self.pos += value.len();
true
} else {
false
}
}
}
#[cfg(test)]
mod tests {
use crate::*;
#[test]
fn var_simple() {
let mut p = Parser::new("${[ foo ]}");
assert_eq!(
p.parse(),
vec![Token::Var { name: "foo".into() }, Token::Eof]
);
}
#[test]
fn var_multiple_names_invalid() {
let mut p = Parser::new("${[ foo bar ]}");
assert_eq!(
p.parse(),
vec![Token::Raw("${[ foo bar ]}".into()), Token::Eof]
);
}
#[test]
fn tag_string() {
let mut p = Parser::new(r#"${[ "foo \"bar\" baz" ]}"#);
assert_eq!(
p.parse(),
vec![Token::Raw(r#"${[ "foo \"bar\" baz" ]}"#.into()), Token::Eof]
);
}
#[test]
fn var_surrounded() {
let mut p = Parser::new("Hello ${[ foo ]}!");
assert_eq!(
p.parse(),
vec![
Token::Raw("Hello ".to_string()),
Token::Var { name: "foo".into() },
Token::Raw("!".to_string()),
Token::Eof,
]
);
}
#[test]
fn fn_simple() {
let mut p = Parser::new("${[ foo() ]}");
assert_eq!(
p.parse(),
vec![
Token::Fn {
name: "foo".into(),
args: Vec::new(),
},
Token::Eof
]
);
}
#[test]
fn fn_ident_arg() {
let mut p = Parser::new("${[ foo(bar) ]}");
assert_eq!(
p.parse(),
vec![
Token::Fn {
name: "foo".into(),
args: vec![Val::Ident("bar".into())],
},
Token::Eof
]
);
}
#[test]
fn fn_ident_args() {
let mut p = Parser::new("${[ foo(bar,baz, qux ) ]}");
assert_eq!(
p.parse(),
vec![
Token::Fn {
name: "foo".into(),
args: vec![
Val::Ident("bar".into()),
Val::Ident("baz".into()),
Val::Ident("qux".into()),
],
},
Token::Eof
]
);
}
#[test]
fn fn_mixed_args() {
let mut p = Parser::new(r#"${[ foo(bar,"baz \"hi\"", qux ) ]}"#);
assert_eq!(
p.parse(),
vec![
Token::Fn {
name: "foo".into(),
args: vec![
Val::Ident("bar".into()),
Val::Str(r#"baz "hi""#.into()),
Val::Ident("qux".into()),
],
},
Token::Eof
]
);
}
}
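
To illustrate how the parser above tokenizes a template that mixes raw text, a function call, and an identifier argument, here is a sketch in the spirit of the tests (not part of the diff; `upper` and `name` are made-up names):

use templates::{Parser, Token, Val};

fn main() {
    let mut p = Parser::new("Hi ${[ upper(name) ]}!");
    assert_eq!(
        p.parse(),
        vec![
            Token::Raw("Hi ".into()),
            Token::Fn {
                name: "upper".into(),
                args: vec![Val::Ident("name".into())],
            },
            Token::Raw("!".into()),
            Token::Eof,
        ]
    );
}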


@@ -0,0 +1,102 @@
use crate::{Parser, Token, Val};
use std::collections::HashMap;
type TemplateCallback = fn(name: &str, args: Vec<&str>) -> String;
pub fn parse_and_render(
template: &str,
vars: HashMap<&str, &str>,
cb: Option<TemplateCallback>,
) -> String {
let mut p = Parser::new(template);
let tokens = p.parse();
render(tokens, vars, cb)
}
pub fn render(
tokens: Vec<Token>,
vars: HashMap<&str, &str>,
cb: Option<TemplateCallback>,
) -> String {
let mut doc_str: Vec<String> = Vec::new();
for t in tokens {
match t {
Token::Raw(s) => doc_str.push(s),
Token::Var { name } => {
if let Some(v) = vars.get(name.as_str()) {
doc_str.push(v.to_string());
}
}
Token::Fn { name, args } => {
let empty = &"";
let resolved_args = args
.iter()
.map(|a| match a {
Val::Str(s) => s.as_str(),
Val::Ident(i) => vars.get(i.as_str()).unwrap_or(empty),
})
.collect();
let val = match cb {
Some(cb) => cb(name.as_str(), resolved_args),
None => "".into(),
};
doc_str.push(val);
}
Token::Eof => {}
}
}
return doc_str.join("");
}
#[cfg(test)]
mod tests {
use crate::*;
use std::collections::HashMap;
#[test]
fn render_empty() {
let template = "";
let vars = HashMap::new();
let result = "";
assert_eq!(parse_and_render(template, vars, None), result.to_string());
}
#[test]
fn render_text_only() {
let template = "Hello World!";
let vars = HashMap::new();
let result = "Hello World!";
assert_eq!(parse_and_render(template, vars, None), result.to_string());
}
#[test]
fn render_simple() {
let template = "${[ foo ]}";
let vars = HashMap::from([("foo", "bar")]);
let result = "bar";
assert_eq!(parse_and_render(template, vars, None), result.to_string());
}
#[test]
fn render_surrounded() {
let template = "hello ${[ word ]} world!";
let vars = HashMap::from([("word", "cruel")]);
let result = "hello cruel world!";
assert_eq!(parse_and_render(template, vars, None), result.to_string());
}
#[test]
fn render_valid_fn() {
let vars = HashMap::new();
let template = r#"${[ say_hello("John", "Kate") ]}"#;
let result = r#"say_hello: ["John", "Kate"]"#;
let cb: fn(&str, Vec<&str>) -> String =
|name: &str, args: Vec<&str>| format!("{name}: {:?}", args);
assert_eq!(
parse_and_render(template, vars, Some(cb)),
result.to_string()
);
}
}
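
Finally, a sketch of how a caller might combine the vars map with a callback so function tags are resolved at render time (illustrative only; the `upper` function is a made-up example, not something the crate ships):

use std::collections::HashMap;
use templates::parse_and_render;

fn main() {
    let vars = HashMap::from([("name", "World")]);
    // Identifier args are looked up in `vars` before the callback runs.
    let cb: fn(&str, Vec<&str>) -> String = |name, args| match name {
        "upper" => args.join(" ").to_uppercase(),
        _ => String::new(),
    };
    assert_eq!(
        parse_and_render("Hello ${[ upper(name) ]}!", vars, Some(cb)),
        "Hello WORLD!"
    );
}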