Postman importer semi-complete

This commit is contained in:
Gregory Schier
2023-11-10 09:08:20 -08:00
parent 60b091ff1c
commit 0bec5a6405
21 changed files with 6057 additions and 13 deletions

23
src-tauri/Cargo.lock generated
View File

@@ -618,6 +618,17 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
[[package]]
name = "colored"
version = "1.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f741c91823341bebf717d4c71bda820630ce065443b58bd1b7451af008355"
dependencies = [
"is-terminal",
"lazy_static",
"winapi",
]
[[package]]
name = "combine"
version = "4.6.6"
@@ -1100,6 +1111,7 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9f0c14694cbd524c8720dd69b0e3179344f04ebb5f90f2e4a440c6ea3b2f1ee"
dependencies = [
"colored",
"log",
]
@@ -2098,6 +2110,17 @@ version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
[[package]]
name = "is-terminal"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi 0.3.3",
"rustix",
"windows-sys 0.48.0",
]
[[package]]
name = "iso8601"
version = "0.3.0"

View File

@@ -44,7 +44,7 @@ tauri = { version = "1.3", features = [
"dialog-save",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1", features = ["colored"] }
tokio = { version = "1.25.0", features = ["sync"] }
uuid = "1.3.0"
log = "0.4.20"

View File

@@ -0,0 +1,90 @@
// Schema URL identifying Postman Collection Format v2.1.0 exports.
const m = 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json';
// Import a Postman Collection (Format v2.1.0) export. Returns undefined when
// the input is not a v2.1.0 collection so the host can try other importers.
function b(e) {
  const root = p(e);
  if (root == null) return;
  const info = s(root.info);
  if (info.schema !== m || !Array.isArray(root.item)) return;
  const resources = { workspaces: [], environments: [], requests: [], folders: [] };
  // A Postman collection maps to a single workspace.
  const workspace = {
    model: 'workspace',
    id: 'wrk_0',
    name: info.name || 'Postman Import',
    description: info.description || '',
  };
  resources.workspaces.push(workspace);
  // Walk the item tree: items with a nested `item` array are folders; items
  // with a `request` property are requests.
  const importItem = (item, folderId = null) => {
    if (typeof item.name == 'string' && Array.isArray(item.item)) {
      const folder = {
        model: 'folder',
        workspaceId: workspace.id,
        id: `fld_${resources.folders.length}`,
        name: item.name,
        folderId,
      };
      resources.folders.push(folder);
      for (const child of item.item) importItem(child, folder.id);
    } else if (typeof item.name == 'string' && 'request' in item) {
      const req = s(item.request);
      const bodyPatch = f(req.body);
      resources.requests.push({
        model: 'http_request',
        id: `req_${resources.requests.length}`,
        workspaceId: workspace.id,
        folderId,
        name: item.name,
        method: req.method || 'GET',
        url: s(req.url).raw,
        headers: [...bodyPatch.headers, ...h(req.header).map(y)],
        body: bodyPatch.body,
        bodyType: bodyPatch.bodyType,
        // TODO: support auth
        // ...importAuth(r.auth),
      });
    } else console.log('Unknown item', item, folderId);
  };
  for (const item of root.item) importItem(item);
  return { resources };
}
// Convert a Postman header entry to the internal { name, value, enabled } shape.
function y(e) {
  const header = s(e);
  return { name: header.key, value: header.value, enabled: true };
}
// Convert a Postman request body into { headers, bodyType, body }.
// Bug fix: the non-GraphQL branch omitted `headers`, but the caller spreads
// `a.headers`, which throws a TypeError for every non-GraphQL request.
function f(e) {
  const body = s(e);
  if (!('graphql' in body)) {
    // TODO: support other body types
    return { headers: [], bodyType: null, body: null };
  }
  return {
    headers: [
      {
        name: 'Content-Type',
        value: 'application/json',
        enabled: true,
      },
    ],
    bodyType: 'graphql',
    body: JSON.stringify(
      { query: body.graphql.query, variables: p(body.graphql.variables) },
      null,
      2,
    ),
  };
}
// Parse a JSON string; yields a plain-object record, or null on parse failure.
function p(e) {
  let parsed;
  try {
    parsed = JSON.parse(e);
  } catch {
    return null;
  }
  return s(parsed);
}
// Coerce any value to a plain-object record; non-objects become {}.
function s(e) {
  const tag = Object.prototype.toString.call(e);
  return tag === '[object Object]' ? e : {};
}
// Coerce any value to an array; non-arrays become [].
function h(e) {
  const tag = Object.prototype.toString.call(e);
  return tag === '[object Array]' ? e : [];
}
// Expose the importer under the hook name the plugin host looks up.
export { b as pluginHookImport };

File diff suppressed because it is too large. Load Diff

View File

@@ -0,0 +1,127 @@
import { Environment, Folder, HttpRequest, Workspace } from '../../../../src-web/lib/models';
// Schema URL identifying Postman Collection Format v2.1.0 exports.
const POSTMAN_2_1_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json';
// Makes the keys K required while leaving every other property of T optional.
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
// The resource tree produced by an import: workspaces plus the environments,
// requests, and folders that belong to them.
interface ExportResources {
  workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
  environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
  requests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
  folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
}
/**
 * Import a Postman Collection (Format v2.1.0) export into app resources.
 *
 * @param contents Raw file contents chosen by the user.
 * @returns The imported resource tree, or undefined when `contents` is not a
 *          v2.1.0 Postman collection (so the host can try the next importer).
 */
export function pluginHookImport(contents: string): { resources: ExportResources } | undefined {
  const root = parseJSONToRecord(contents);
  if (root == null) return;

  const info = toRecord(root.info);
  if (info.schema !== POSTMAN_2_1_0_SCHEMA || !Array.isArray(root.item)) {
    return;
  }

  const exportResources: ExportResources = {
    workspaces: [],
    environments: [],
    requests: [],
    folders: [],
  };

  // A Postman collection maps to a single workspace.
  const workspace: ExportResources['workspaces'][0] = {
    model: 'workspace',
    id: 'wrk_0',
    name: info.name || 'Postman Import',
    description: info.description || '',
  };
  exportResources.workspaces.push(workspace);

  // Walk the item tree: items with a nested `item` array are folders; items
  // with a `request` property are requests.
  const importItem = (v: Record<string, any>, folderId: string | null = null) => {
    if (typeof v.name === 'string' && Array.isArray(v.item)) {
      const folder: ExportResources['folders'][0] = {
        model: 'folder',
        workspaceId: workspace.id,
        id: `fld_${exportResources.folders.length}`,
        name: v.name,
        folderId,
      };
      exportResources.folders.push(folder);
      for (const child of v.item) {
        importItem(child, folder.id);
      }
    } else if (typeof v.name === 'string' && 'request' in v) {
      const r = toRecord(v.request);
      const bodyPatch = importBody(r.body);
      const request: ExportResources['requests'][0] = {
        model: 'http_request',
        id: `req_${exportResources.requests.length}`,
        workspaceId: workspace.id,
        folderId,
        name: v.name,
        method: r.method || 'GET',
        // Postman allows `url` to be either a plain string or an object with
        // a `raw` property; the previous code dropped string-form URLs.
        url: typeof r.url === 'string' ? r.url : toRecord(r.url).raw,
        headers: [...bodyPatch.headers, ...toArray(r.header).map(importHeader)],
        body: bodyPatch.body,
        bodyType: bodyPatch.bodyType,
        // TODO: support auth
        // ...importAuth(r.auth),
      };
      exportResources.requests.push(request);
    } else {
      console.log('Unknown item', v, folderId);
    }
  };

  for (const item of root.item) {
    importItem(item);
  }

  return { resources: exportResources };
}
/**
 * Convert a Postman header entry to the internal header shape.
 *
 * Postman marks skipped headers with `disabled: true`; the previous code
 * forced every imported header to enabled, losing that flag.
 */
function importHeader(h: any): HttpRequest['headers'][0] {
  const header = toRecord(h);
  return {
    name: header.key,
    value: header.value,
    // Respect Postman's `disabled` flag instead of always enabling.
    enabled: header.disabled !== true,
  };
}
/**
 * Convert a Postman request body into the internal body/header patch.
 *
 * Currently only GraphQL bodies are supported; everything else imports as an
 * empty body.
 */
function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'headers'> {
  const body = toRecord(rawBody);
  if ('graphql' in body) {
    // Guard against `graphql: null` — the `in` check only proves the key
    // exists; the previous code crashed on `body.graphql.query`.
    const graphql = toRecord(body.graphql);
    return {
      headers: [
        {
          name: 'Content-Type',
          value: 'application/json',
          enabled: true,
        },
      ],
      bodyType: 'graphql',
      body: JSON.stringify(
        { query: graphql.query, variables: parseJSONToRecord(graphql.variables) },
        null,
        2,
      ),
    };
  } else {
    // TODO: support other body types (raw, urlencoded, formdata, file)
    return { headers: [], bodyType: null, body: null };
  }
}
/** Parse a JSON string; returns a plain-object record, or null on parse failure. */
function parseJSONToRecord(jsonStr: string): Record<string, any> | null {
  let parsed: unknown;
  try {
    parsed = JSON.parse(jsonStr);
  } catch (err) {
    return null;
  }
  return toRecord(parsed);
}
/** Coerce any value to a plain-object record; non-objects become {}. */
function toRecord(value: any): Record<string, any> {
  const isPlainObject = Object.prototype.toString.call(value) === '[object Object]';
  return isPlainObject ? value : {};
}
/** Coerce any value to an array; non-arrays become []. */
function toArray(value: any): any[] {
  // Array.isArray is the idiomatic (and cross-realm-safe) replacement for
  // the Object.prototype.toString tag comparison.
  return Array.isArray(value) ? value : [];
}

View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2020",
"useDefineForClassFields": true,
"module": "ESNext",
"lib": [
"ES2020"
],
"skipLibCheck": true,
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true
},
"include": [
"src"
]
}

View File

@@ -0,0 +1,13 @@
import { resolve } from 'path';
import { defineConfig } from 'vite';
// Build this plugin as a single ES-module library bundle emitted to ./out.
const entryFile = resolve(__dirname, 'src/index.ts');

export default defineConfig({
  build: {
    lib: { entry: entryFile, fileName: 'index', formats: ['es'] },
    outDir: resolve(__dirname, 'out'),
  },
});

View File

@@ -14,6 +14,7 @@ use std::io::Write;
use std::process::exit;
use base64::Engine;
use fern::colors::ColoredLevelConfig;
use http::header::{HeaderName, ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderValue, Method};
use log::info;
@@ -27,14 +28,14 @@ use sqlx::{Pool, Sqlite, SqlitePool};
use tauri::TitleBarStyle;
use tauri::{AppHandle, Menu, RunEvent, State, Submenu, Window, WindowUrl, Wry};
use tauri::{CustomMenuItem, Manager, WindowEvent};
use tauri_plugin_log::LogTarget;
use tauri_plugin_log::{fern, LogTarget};
use tauri_plugin_window_state::{StateFlags, WindowExt};
use tokio::sync::Mutex;
use window_ext::TrafficLightWindowExt;
use crate::analytics::{track_event, AnalyticsAction, AnalyticsResource};
use crate::plugin::ImportResources;
use crate::plugin::{ImportResources, ImportResult};
mod analytics;
mod models;
@@ -268,8 +269,8 @@ async fn import_data(
file_paths: Vec<&str>,
) -> Result<ImportResources, String> {
let pool = &*db_instance.lock().await;
let mut resources: Option<ImportResources> = None;
let plugins = vec!["yaak-importer", "insomnia-importer"];
let mut result: Option<ImportResult> = None;
let plugins = vec!["importer-yaak", "importer-insomnia", "importer-postman"];
for plugin_name in plugins {
if let Some(r) = plugin::run_plugin_import(
&window.app_handle(),
@@ -278,18 +279,18 @@ async fn import_data(
)
.await
{
resources = Some(r);
result = Some(r);
break;
}
}
match resources {
match result {
None => Err("No importers found for the chosen file".to_string()),
Some(r) => {
let mut imported_resources = ImportResources::default();
info!("Importing resources");
for w in r.workspaces {
for w in r.resources.workspaces {
let x = models::upsert_workspace(pool, w)
.await
.expect("Failed to create workspace");
@@ -297,7 +298,7 @@ async fn import_data(
info!("Imported workspace: {}", x.name);
}
for e in r.environments {
for e in r.resources.environments {
let x = models::upsert_environment(pool, e)
.await
.expect("Failed to create environment");
@@ -305,7 +306,7 @@ async fn import_data(
info!("Imported environment: {}", x.name);
}
for f in r.folders {
for f in r.resources.folders {
let x = models::upsert_folder(pool, f)
.await
.expect("Failed to create folder");
@@ -313,7 +314,7 @@ async fn import_data(
info!("Imported folder: {}", x.name);
}
for r in r.requests {
for r in r.resources.requests {
let x = models::upsert_request(pool, r)
.await
.expect("Failed to create request");
@@ -794,6 +795,8 @@ fn main() {
.targets([LogTarget::LogDir, LogTarget::Stdout, LogTarget::Webview])
.level_for("tao", log::LevelFilter::Info)
.level_for("sqlx", log::LevelFilter::Warn)
.with_colors(ColoredLevelConfig::default())
.level(log::LevelFilter::Trace)
.build(),
)
.plugin(tauri_plugin_window_state::Builder::default().build())

View File

@@ -15,6 +15,11 @@ use tauri::AppHandle;
use crate::models::{Environment, Folder, HttpRequest, Workspace};
#[derive(Default, Debug, Deserialize, Serialize)]
pub struct ImportResult {
pub resources: ImportResources,
}
#[derive(Default, Debug, Deserialize, Serialize)]
pub struct ImportResources {
pub workspaces: Vec<Workspace>,
@@ -27,7 +32,7 @@ pub async fn run_plugin_import(
app_handle: &AppHandle,
plugin_name: &str,
file_path: &str,
) -> Option<ImportResources> {
) -> Option<ImportResult> {
let file = fs::read_to_string(file_path)
.unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
let file_contents = file.as_str();
@@ -42,7 +47,7 @@ pub async fn run_plugin_import(
return None;
}
let resources: ImportResources =
let resources: ImportResult =
serde_json::from_value(result_json).expect("failed to parse result json");
Some(resources)
}