diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md
index 508eeaba..e4bedc06 100644
--- a/DEVELOPMENT.md
+++ b/DEVELOPMENT.md
@@ -60,3 +60,10 @@ Run the app to apply the migrations.
 If nothing happens, try `cargo clean` and run the app again.
 
 _Note: Development builds use a separate database location from production builds._
+
+## Lezer Grammar Generation
+
+```sh
+# Example
+lezer-generator components/core/Editor//.grammar > components/core/Editor//.ts
+```
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index 6e55d2c8..83c90a4e 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -7838,6 +7838,7 @@ dependencies = [
 name = "yaak-templates"
 version = "0.1.0"
 dependencies = [
+ "base64 0.22.1",
  "log",
  "serde",
  "serde_json",
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 44e9cfff..4eb486af 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -114,8 +114,8 @@ async fn cmd_metadata(app_handle: AppHandle) -> Result {
 }
 
 #[tauri::command]
-async fn cmd_parse_template(template: &str) -> Result<Tokens, String> {
-    Ok(Parser::new(template).parse())
+async fn cmd_parse_template(template: &str) -> YaakResult<Tokens> {
+    Ok(Parser::new(template).parse()?)
 }
 
 #[tauri::command]
diff --git a/src-tauri/yaak-templates/Cargo.toml b/src-tauri/yaak-templates/Cargo.toml
index e6dae6ec..e960475a 100644
--- a/src-tauri/yaak-templates/Cargo.toml
+++ b/src-tauri/yaak-templates/Cargo.toml
@@ -5,9 +5,10 @@ edition = "2021"
 publish = false
 
 [dependencies]
+base64 = "0.22.1"
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
 log = "0.4.22"
-serde = { version = "1.0.208", features = ["derive"] }
-serde_json = "1.0.132"
 thiserror = { workspace = true }
 tokio = { version = "1.39.3", features = ["macros", "rt"] }
 ts-rs = { version = "10.0.0" }
diff --git a/src-tauri/yaak-templates/src/parser.rs b/src-tauri/yaak-templates/src/parser.rs
index f4d6791c..b0f009ee 100644
--- a/src-tauri/yaak-templates/src/parser.rs
+++ b/src-tauri/yaak-templates/src/parser.rs
@@ -1,3 +1,7 @@
+use crate::error::Error::RenderError;
+use crate::error::Result;
+use base64::prelude::BASE64_URL_SAFE_NO_PAD;
+use base64::Engine;
 use serde::{Deserialize, Serialize};
 use std::fmt::Display;
 use ts_rs::TS;
@@ -43,7 +47,13 @@ pub enum Val {
 impl Display for Val {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let str = match self {
-            Val::Str { text } => format!("'{}'", text.to_string().replace("'", "\'")),
+            Val::Str { text } => {
+                if text.chars().all(|c| c.is_alphanumeric() || c == ' ' || c == '_') {
+                    format!("'{}'", text)
+                } else {
+                    format!("b64'{}'", BASE64_URL_SAFE_NO_PAD.encode(text))
+                }
+            }
             Val::Var { name } => name.to_string(),
             Val::Bool { value } => value.to_string(),
             Val::Fn { name, args } => {
@@ -108,13 +118,13 @@ impl Parser {
         }
     }
 
-    pub fn parse(&mut self) -> Tokens {
+    pub fn parse(&mut self) -> Result<Tokens> {
         let start_pos = self.pos;
 
         while self.pos < self.chars.len() {
             if self.match_str("${[") {
                 let start_curr = self.pos;
-                if let Some(t) = self.parse_tag() {
+                if let Some(t) = self.parse_tag()? {
                     self.push_token(t);
                 } else {
                     self.pos = start_curr;
@@ -131,29 +141,29 @@ impl Parser {
         }
 
         self.push_token(Token::Eof);
 
-        Tokens {
+        Ok(Tokens {
             tokens: self.tokens.clone(),
-        }
+        })
     }
 
-    fn parse_tag(&mut self) -> Option<Token> {
+    fn parse_tag(&mut self) -> Result<Option<Token>> {
         // Parse up to first identifier
         // ${[ my_var...
         self.skip_whitespace();
-        let val = match self.parse_value() {
+        let val = match self.parse_value()? {
             Some(v) => v,
-            None => return None,
+            None => return Ok(None),
         };
 
         // Parse to closing tag
         // ${[ my_var(a, b, c) ]}
         self.skip_whitespace();
         if !self.match_str("]}") {
-            return None;
+            return Ok(None);
         }
 
-        Some(Token::Tag { val })
+        Ok(Some(Token::Tag { val }))
     }
 
     #[allow(dead_code)]
@@ -167,9 +177,11 @@ impl Parser {
         );
     }
 
-    fn parse_value(&mut self) -> Option<Val> {
-        if let Some((name, args)) = self.parse_fn() {
+    fn parse_value(&mut self) -> Result<Option<Val>> {
+        let v = if let Some((name, args)) = self.parse_fn()? {
             Some(Val::Fn { name, args })
+        } else if let Some(v) = self.parse_string()? {
+            Some(Val::Str { text: v })
         } else if let Some(v) = self.parse_ident() {
             if v == "null" {
                 Some(Val::Null)
@@ -180,38 +192,38 @@ impl Parser {
             } else {
                 Some(Val::Var { name: v })
             }
-        } else if let Some(v) = self.parse_string() {
-            Some(Val::Str { text: v })
         } else {
             None
-        }
+        };
+
+        Ok(v)
     }
 
-    fn parse_fn(&mut self) -> Option<(String, Vec<FnArg>)> {
+    fn parse_fn(&mut self) -> Result<Option<(String, Vec<FnArg>)>> {
         let start_pos = self.pos;
 
         let name = match self.parse_fn_name() {
             Some(v) => v,
             None => {
                 self.pos = start_pos;
-                return None;
+                return Ok(None);
             }
         };
 
-        let args = match self.parse_fn_args() {
+        let args = match self.parse_fn_args()? {
             Some(args) => args,
             None => {
                 self.pos = start_pos;
-                return None;
+                return Ok(None);
             }
         };
 
-        Some((name, args))
+        Ok(Some((name, args)))
     }
 
-    fn parse_fn_args(&mut self) -> Option<Vec<FnArg>> {
+    fn parse_fn_args(&mut self) -> Result<Option<Vec<FnArg>>> {
         if !self.match_str("(") {
-            return None;
+            return Ok(None);
         }
 
         let start_pos = self.pos;
@@ -221,7 +233,7 @@ impl Parser {
         // Fn closed immediately
         self.skip_whitespace();
         if self.match_str(")") {
-            return Some(args);
+            return Ok(Some(args));
         }
 
         while self.pos < self.chars.len() {
@@ -231,7 +243,7 @@ impl Parser {
             self.skip_whitespace();
             self.match_str("=");
             self.skip_whitespace();
-            let value = self.parse_value();
+            let value = self.parse_value()?;
             self.skip_whitespace();
 
             if let (Some(name), Some(value)) = (name.clone(), value.clone()) {
@@ -239,7 +251,7 @@ impl Parser {
             } else {
                 // Didn't find valid thing, so return
                 self.pos = start_pos;
-                return None;
+                return Ok(None);
             }
 
             if self.match_str(")") {
@@ -251,7 +263,7 @@ impl Parser {
             // If we don't find a comma, that's bad
             if !args.is_empty() && !self.match_str(",") {
                 self.pos = start_pos;
-                return None;
+                return Ok(None);
             }
 
             if start_pos == self.pos {
@@ -259,7 +271,7 @@ impl Parser {
             }
         }
 
-        Some(args)
+        Ok(Some(args))
     }
 
     fn parse_ident(&mut self) -> Option<String> {
@@ -319,12 +331,17 @@ impl Parser {
         Some(text)
     }
 
-    fn parse_string(&mut self) -> Option<String> {
+    fn parse_string(&mut self) -> Result<Option<String>> {
         let start_pos = self.pos;
         let mut text = String::new();
 
-        if !self.match_str("'") {
-            return None;
+        let mut is_b64 = false;
+        if self.match_str("b64'") {
+            is_b64 = true;
+        } else if self.match_str("'") {
+            // Nothing
+        } else {
+            return Ok(None);
         }
 
         let mut found_closing = false;
@@ -350,10 +367,21 @@ impl Parser {
 
         if !found_closing {
             self.pos = start_pos;
-            return None;
+            return Ok(None);
        }
 
-        Some(text)
+        let final_text = if is_b64 {
+            let decoded = BASE64_URL_SAFE_NO_PAD
+                .decode(text.clone())
+                .map_err(|_| RenderError(format!("Failed to decode string {text}")))?;
+            let decoded = String::from_utf8(decoded)
+                .map_err(|_| RenderError(format!("Failed to decode utf8 string {text}")))?;
+            decoded
+        } else {
+            text
+        };
+
+        Ok(Some(final_text))
     }
 
     fn skip_whitespace(&mut self) {
@@ -410,14 +438,15 @@ impl Parser {
 
 #[cfg(test)]
 mod tests {
+    use crate::error::Result;
     use crate::Val::Null;
     use crate::*;
 
     #[test]
-    fn var_simple() {
+    fn var_simple() -> Result<()> {
         let mut p = Parser::new("${[ foo ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Var { name: "foo".into() }
@@ -425,13 +454,14 @@ mod tests {
                 },
                 Token::Eof
             ]
         );
+        Ok(())
     }
 
     #[test]
-    fn var_dashes() {
+    fn var_dashes() -> Result<()> {
         let mut p = Parser::new("${[ a-b ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Var { name: "a-b".into() }
@@ -439,13 +469,15 @@ mod tests {
                 },
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_underscores() {
+    fn var_underscores() -> Result<()> {
         let mut p = Parser::new("${[ a_b ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Var { name: "a_b".into() }
@@ -453,13 +485,15 @@ mod tests {
                 },
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_prefixes() {
+    fn var_prefixes() -> Result<()> {
         let mut p = Parser::new("${[ -a ]}${[ 0a ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Raw {
                     // Shouldn't be parsed, because they're invalid
@@ -468,13 +502,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_underscore_prefix() {
+    fn var_underscore_prefix() -> Result<()> {
         let mut p = Parser::new("${[ _a ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Var { name: "_a".into() }
@@ -482,13 +518,15 @@ mod tests {
                 },
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_boolean() {
+    fn var_boolean() -> Result<()> {
         let mut p = Parser::new("${[ true ]}${[ false ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Bool { value: true },
@@ -499,13 +537,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_multiple_names_invalid() {
+    fn var_multiple_names_invalid() -> Result<()> {
         let mut p = Parser::new("${[ foo bar ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Raw {
                     text: "${[ foo bar ]}".into()
@@ -513,13 +553,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn tag_string() {
+    fn tag_string() -> Result<()> {
         let mut p = Parser::new(r#"${[ 'foo \'bar\' baz' ]}"#);
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Str {
@@ -529,13 +571,33 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn var_surrounded() {
+    fn tag_b64_string() -> Result<()> {
+        let mut p = Parser::new(r#"${[ b64'Zm9vICdiYXInIGJheg' ]}"#);
+        assert_eq!(
+            p.parse()?.tokens,
+            vec![
+                Token::Tag {
+                    val: Val::Str {
+                        text: r#"foo 'bar' baz"#.into()
+                    }
+                },
+                Token::Eof
+            ]
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn var_surrounded() -> Result<()> {
         let mut p = Parser::new("Hello ${[ foo ]}!");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Raw {
                     text: "Hello ".to_string()
@@ -549,13 +611,15 @@ mod tests {
                 Token::Eof,
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_simple() {
+    fn fn_simple() -> Result<()> {
         let mut p = Parser::new("${[ foo() ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -566,13 +630,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_dot_name() {
+    fn fn_dot_name() -> Result<()> {
         let mut p = Parser::new("${[ foo.bar.baz() ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -583,13 +649,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_ident_arg() {
+    fn fn_ident_arg() -> Result<()> {
         let mut p = Parser::new("${[ foo(a=bar) ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -603,13 +671,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_ident_args() {
+    fn fn_ident_args() -> Result<()> {
         let mut p = Parser::new("${[ foo(a=bar,b = baz, c =qux ) ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -633,13 +703,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_mixed_args() {
+    fn fn_mixed_args() -> Result<()> {
         let mut p = Parser::new(r#"${[ foo(aaa=bar,bb='baz \'hi\'', c=qux, z=true ) ]}"#);
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -669,13 +741,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_nested() {
+    fn fn_nested() -> Result<()> {
         let mut p = Parser::new("${[ foo(b=bar()) ]}");
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -692,13 +766,15 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn fn_nested_args() {
+    fn fn_nested_args() -> Result<()> {
         let mut p = Parser::new(r#"${[ outer(a=inner(a=foo, b='i'), c='o') ]}"#);
         assert_eq!(
-            p.parse().tokens,
+            p.parse()?.tokens,
             vec![
                 Token::Tag {
                     val: Val::Fn {
@@ -730,10 +806,12 @@ mod tests {
                 Token::Eof
             ]
         );
+
+        Ok(())
     }
 
     #[test]
-    fn token_display_var() {
+    fn token_display_var() -> Result<()> {
         assert_eq!(
             Val::Var {
                 name: "foo".to_string()
@@ -741,21 +819,38 @@ mod tests {
             .to_string(),
             "foo"
         );
+
+        Ok(())
     }
 
     #[test]
-    fn token_display_str() {
+    fn token_display_str() -> Result<()> {
+        assert_eq!(
+            Val::Str {
+                text: "Hello You".to_string()
+            }
+            .to_string(),
+            "'Hello You'"
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn token_display_complex_str() -> Result<()> {
         assert_eq!(
             Val::Str {
                 text: "Hello 'You'".to_string()
             }
             .to_string(),
-            "'Hello \'You\''"
+            "b64'SGVsbG8gJ1lvdSc'"
         );
+
+        Ok(())
     }
 
     #[test]
-    fn token_null_fn_arg() {
+    fn token_null_fn_arg() -> Result<()> {
         assert_eq!(
             Val::Fn {
                 name: "fn".to_string(),
@@ -775,10 +870,12 @@ mod tests {
             .to_string(),
             r#"fn(a='aaa')"#
         );
+
+        Ok(())
     }
 
     #[test]
-    fn token_display_fn() {
+    fn token_display_fn() -> Result<()> {
         assert_eq!(
             Token::Tag {
                 val: Val::Fn {
@@ -787,7 +884,7 @@ mod tests {
                     FnArg {
                         name: "arg".to_string(),
                         value: Val::Str {
-                            text: "v".to_string()
+                            text: "v 'x'".to_string()
                         }
                     },
                     FnArg {
@@ -800,12 +897,14 @@ mod tests {
                 }
             }
             .to_string(),
-            r#"${[ foo(arg='v', arg2=my_var) ]}"#
+            r#"${[ foo(arg=b64'diAneCc', arg2=my_var) ]}"#
         );
+
+        Ok(())
     }
 
     #[test]
-    fn tokens_display() {
+    fn tokens_display() -> Result<()> {
         assert_eq!(
             Tokens {
                 tokens: vec![
@@ -827,5 +926,7 @@ mod tests {
             .to_string(),
             r#"${[ my_var ]} Some cool text ${[ 'Hello World' ]}"#
         );
+
+        Ok(())
     }
 }
diff --git a/src-tauri/yaak-templates/src/renderer.rs b/src-tauri/yaak-templates/src/renderer.rs
index a9e378a3..989c61a6 100644
--- a/src-tauri/yaak-templates/src/renderer.rs
+++ b/src-tauri/yaak-templates/src/renderer.rs
@@ -1,6 +1,6 @@
 use crate::error::Error::{RenderStackExceededError, VariableNotFound};
 use crate::error::Result;
-use crate::{FnArg, Parser, Token, Tokens, Val};
+use crate::{Parser, Token, Tokens, Val};
 use log::warn;
 use serde_json::json;
 use std::collections::HashMap;
@@ -44,14 +44,14 @@ pub async fn render_json_value_raw(
     Ok(v)
 }
 
-async fn parse_and_render_with_depth<T: TemplateCallback>(
+async fn parse_and_render_at_depth<T: TemplateCallback>(
     template: &str,
     vars: &HashMap<String, String>,
     cb: &T,
     depth: usize,
 ) -> Result<String> {
     let mut p = Parser::new(template);
-    let tokens = p.parse();
+    let tokens = p.parse()?;
     render(tokens, vars, cb, depth + 1).await
 }
 
@@ -60,7 +60,7 @@ pub async fn parse_and_render(
     vars: &HashMap<String, String>,
     cb: &T,
 ) -> Result<String> {
-    parse_and_render_with_depth(template, vars, cb, 1).await
+    parse_and_render_at_depth(template, vars, cb, 1).await
 }
 
 pub async fn render<T: TemplateCallback>(
@@ -79,7 +79,7 @@ pub async fn render(
     for t in tokens.tokens {
         match t {
             Token::Raw { text } => doc_str.push(text),
-            Token::Tag { val } => doc_str.push(render_tag(val, &vars, cb, depth).await?),
+            Token::Tag { val } => doc_str.push(render_value(val, &vars, cb, depth).await?),
             Token::Eof => {}
         }
     }
@@ -87,44 +87,31 @@ pub async fn render(
     Ok(doc_str.join(""))
 }
 
-async fn render_tag<T: TemplateCallback>(
+async fn render_value<T: TemplateCallback>(
     val: Val,
     vars: &HashMap<String, String>,
     cb: &T,
     depth: usize,
 ) -> Result<String> {
     let v = match val {
-        Val::Str { text } => text.into(),
+        Val::Str { text } => {
+            let r = Box::pin(parse_and_render_at_depth(&text, vars, cb, depth)).await?;
+            r.to_string()
+        }
         Val::Var { name } => match vars.get(name.as_str()) {
             Some(v) => {
-                let r = Box::pin(parse_and_render_with_depth(v, vars, cb, depth)).await?;
+                let r = Box::pin(parse_and_render_at_depth(v, vars, cb, depth)).await?;
                 r.to_string()
             }
             None => return Err(VariableNotFound(name)),
         },
         Val::Bool { value } => value.to_string(),
        Val::Fn { name, args } => {
-            let empty = "".to_string();
+            // let empty = "".to_string();
             let mut resolved_args: HashMap<String, String> = HashMap::new();
             for a in args {
-                let (k, v) = match a {
-                    FnArg {
-                        name,
-                        value: Val::Str { text },
-                    } => (name.to_string(), text.to_string()),
-                    FnArg {
-                        name,
-                        value: Val::Var { name: var_name },
-                    } => (
-                        name.to_string(),
-                        vars.get(var_name.as_str()).unwrap_or(&empty).to_string(),
-                    ),
-                    FnArg { name, value: val } => {
-                        let r = Box::pin(render_tag(val.clone(), vars, cb, depth)).await?;
-                        (name.to_string(), r)
-                    }
-                };
-                resolved_args.insert(k, v);
+                let v = Box::pin(render_value(a.value, vars, cb, depth)).await?;
+                resolved_args.insert(a.name, v);
             }
             match cb.run(name.as_str(), resolved_args.clone()).await {
                 Ok(s) => s,
@@ -253,6 +240,67 @@ mod parse_and_render_tests {
         Ok(())
     }
 
+    #[tokio::test]
+    async fn render_fn_arg() -> Result<()> {
+        let vars = HashMap::new();
+        let template = r#"${[ upper(foo='bar') ]}"#;
+        let result = r#"BAR"#;
+        struct CB {}
+        impl TemplateCallback for CB {
+            async fn run(&self, fn_name: &str, args: HashMap<String, String>) -> Result<String> {
+                Ok(match fn_name {
+                    "secret" => "abc".to_string(),
+                    "upper" => args["foo"].to_string().to_uppercase(),
+                    _ => "".to_string(),
+                })
+            }
+        }
+
+        assert_eq!(parse_and_render(template, &vars, &CB {}).await?, result.to_string());
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn render_fn_b64_arg_template() -> Result<()> {
+        let mut vars = HashMap::new();
+        vars.insert("foo".to_string(), "bar".to_string());
+        let template = r#"${[ upper(foo=b64'Zm9vICdiYXInIGJheg') ]}"#;
+        let result = r#"FOO 'BAR' BAZ"#;
+        struct CB {}
+        impl TemplateCallback for CB {
+            async fn run(&self, fn_name: &str, args: HashMap<String, String>) -> Result<String> {
+                Ok(match fn_name {
+                    "upper" => args["foo"].to_string().to_uppercase(),
+                    _ => "".to_string(),
+                })
+            }
+        }
+
+        assert_eq!(parse_and_render(template, &vars, &CB {}).await?, result.to_string());
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn render_fn_arg_template() -> Result<()> {
+        let mut vars = HashMap::new();
+        vars.insert("foo".to_string(), "bar".to_string());
+        let template = r#"${[ upper(foo='${[ foo ]}') ]}"#;
+        let result = r#"BAR"#;
+        struct CB {}
+        impl TemplateCallback for CB {
+            async fn run(&self, fn_name: &str, args: HashMap<String, String>) -> Result<String> {
+                Ok(match fn_name {
+                    "secret" => "abc".to_string(),
+                    "upper" => args["foo"].to_string().to_uppercase(),
+                    _ => "".to_string(),
+                })
+            }
+        }
+
+        assert_eq!(parse_and_render(template, &vars, &CB {}).await?, result.to_string());
+        Ok(())
+    }
+
     #[tokio::test]
     async fn render_nested_fn() -> Result<()> {
         let vars = HashMap::new();
@@ -277,7 +325,6 @@ mod parse_and_render_tests {
     async fn render_fn_err() -> Result<()> {
         let vars = HashMap::new();
         let template = r#"${[ error() ]}"#;
-        let result = r#""#;
 
         struct CB {}
         impl TemplateCallback for CB {
@@ -286,7 +333,10 @@ mod parse_and_render_tests {
             }
         }
 
-        assert_eq!(parse_and_render(template, &vars, &CB {}).await?, result.to_string());
+        assert_eq!(
+            parse_and_render(template, &vars, &CB {}).await,
+            Err(RenderError("Failed to do it!".to_string()))
+        );
         Ok(())
     }
 }
diff --git a/src-web/components/TemplateFunctionDialog.tsx b/src-web/components/TemplateFunctionDialog.tsx
index 86e77433..443456de 100644
--- a/src-web/components/TemplateFunctionDialog.tsx
+++ b/src-web/components/TemplateFunctionDialog.tsx
@@ -5,10 +5,13 @@ import { useMemo, useState } from 'react';
 import { useDebouncedValue } from '../hooks/useDebouncedValue';
 import { useRenderTemplate } from '../hooks/useRenderTemplate';
 import { useTemplateTokensToString } from '../hooks/useTemplateTokensToString';
+import { useToggle } from '../hooks/useToggle';
 import { Button } from './core/Button';
 import { InlineCode } from './core/InlineCode';
-import { VStack } from './core/Stacks';
+import { HStack, VStack } from './core/Stacks';
 import { DYNAMIC_FORM_NULL_ARG, DynamicForm } from './DynamicForm';
+import { IconButton } from './core/IconButton';
+import { Banner } from './core/Banner';
 
 interface Props {
   templateFunction: TemplateFunction;
@@ -18,6 +21,7 @@
 }
 
 export function TemplateFunctionDialog({ templateFunction, hide, initialTokens, onChange }: Props) {
+  const [showSecretsInPreview, toggleShowSecretsInPreview] = useToggle(false);
   const [argValues, setArgValues] = useState>(() => {
     const initial: Record = {};
     const initialArgs =
@@ -77,27 +81,65 @@ export function TemplateFunctionDialog({ templateFunction, hide, initialTokens,
   const debouncedTagText = useDebouncedValue(tagText.data ?? '', 200);
   const rendered = useRenderTemplate(debouncedTagText);
-  const tooLarge = (rendered.data ?? '').length > 10000;
+  const tooLarge = rendered.data ? rendered.data.length > 10000 : false;
+  const dataContainsSecrets = useMemo(() => {
+    for (const [name, value] of Object.entries(argValues)) {
+      const isPassword = templateFunction.args.some(
+        (a) => a.type === 'text' && a.password && a.name === name,
+      );
+      if (isPassword && typeof value === 'string' && value && rendered.data?.includes(value)) {
+        return true;
+      }
+    }
+    return false;
+    // Only update this on rendered data change to keep secrets hidden on input change
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [rendered.data]);
 
   return (

{templateFunction.name}(…)

- -
Preview
- - {tooLarge ? 'too large to preview' : rendered.data || <> } - + + +
Rendered Preview
+ +
+            {rendered.error || tagText.error ? (
+              {`${rendered.error || tagText.error}`}
+            ) : (
+              
+                {dataContainsSecrets && !showSecretsInPreview ? (
+                  ------ sensitive values hidden ------
+                ) : tooLarge ? (
+                  'too large to preview'
+                ) : (
+                  rendered.data || <> 
+                )}
+              
+            )}