From eb4d33c8605c740e8fab7efb4de710137310b852 Mon Sep 17 00:00:00 2001 From: Anthony Dodd Date: Thu, 4 Mar 2021 11:03:31 -0600 Subject: [PATCH] Proxy support for WebSockets is g2g! The issue described in #95 has been thoroughly tested & is now fixed. Shoutout to @jakule for their work on implementing a solid fix for this. closes #95 Co-authored-by: Hamza --- CHANGELOG.md | 12 +++ Cargo.lock | 107 ++++++++++++++++++++---- Cargo.toml | 14 ++-- README.md | 4 +- Trunk.toml | 6 ++ site/content/configuration.md | 4 +- src/build.rs | 5 +- src/common.rs | 23 ++++-- src/config/mod.rs | 3 +- src/config/models.rs | 9 ++ src/config/rt.rs | 3 + src/pipelines/copydir.rs | 18 ++-- src/pipelines/copyfile.rs | 14 ++-- src/pipelines/css.rs | 14 ++-- src/pipelines/html.rs | 23 ++++-- src/pipelines/icon.rs | 14 ++-- src/pipelines/inline.rs | 16 ++-- src/pipelines/mod.rs | 34 ++++---- src/pipelines/rust_app.rs | 23 +++--- src/pipelines/rust_worker.rs | 6 +- src/pipelines/sass.rs | 14 ++-- src/proxy.rs | 149 +++++++++++++++++++++++++++++++--- src/serve.rs | 24 +++--- 23 files changed, 402 insertions(+), 137 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 27a7b689..3ab70f37 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,18 @@ Subheadings to categorize changes are `added, changed, deprecated, removed, fixe ## Unreleased +## 0.9.0 +### added +Added support for proxying WebSockets. This was a long-standing feature request. Due to changes upstream in the async-std/tide ecosystem, we are now able to properly support this. This will also unlock some nice features such as HMR via WebSockets, and other such niceties. + +- Added the `--proxy-ws` CLI option for enabling WebSocket proxying on a CLI defined proxy. +- Added the `ws = true` field to the `Trunk.toml` `[[proxy]]` sections which will enable WebSocket proxying for proxies defined in the `Trunk.toml`. 
+ +### fixed +- Closed [#81](https://github.com/thedodd/trunk/issues/81): this is no longer needed as we now have support for WebSockets. HTTP2 is still outstanding, but that will not be a blocker for use from the web. +- Closed [#95](https://github.com/thedodd/trunk/issues/95): fixed via a few small changes to precedence in routing. +- Closed [#53](https://github.com/thedodd/trunk/issues/53): we've now implemented support for proxying WebSockets. + ## 0.8.3 ### fixed - Fixed [#133](https://github.com/thedodd/trunk/issues/133) where `watch` was infinitely looping on Windows diff --git a/Cargo.lock b/Cargo.lock index 02fbd170..63ee7547 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -144,9 +144,9 @@ dependencies = [ [[package]] name = "async-h1" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9e2a9745d9cd0d92ed7641ce4d07568985762f92633260f0afe8ac7917d9d7" +checksum = "cc5142de15b549749cce62923a50714b0d7b77f5090ced141599e78899865451" dependencies = [ "async-channel", "async-dup", @@ -272,7 +272,7 @@ dependencies = [ "memchr", "num_cpus", "once_cell", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "pin-utils", "slab", "wasm-bindgen-futures", @@ -295,6 +295,20 @@ dependencies = [ "syn", ] +[[package]] +name = "async-tungstenite" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39eca8dd578b18e557361e50ca767df55c5e62f690a5e53868c3c7a8123145b7" +dependencies = [ + "async-std", + "futures-io", + "futures-util", + "log", + "pin-project", + "tungstenite", +] + [[package]] name = "atomic-waker" version = "1.0.0" @@ -1030,7 +1044,7 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "waker-fn", ] @@ -1071,7 +1085,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "pin-utils", "proc-macro-hack", "proc-macro-nested", @@ -1243,7 
+1257,7 @@ dependencies = [ "cookie", "futures-lite", "infer", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "rand 0.7.3", "serde", "serde_json", @@ -1307,6 +1321,15 @@ dependencies = [ "libc", ] +[[package]] +name = "input_buffer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a8a95243d5a0398cae618ec29477c6e3cb631152be5c19481f80bc71559754" +dependencies = [ + "bytes 0.5.6", +] + [[package]] name = "insta" version = "0.16.1" @@ -1599,9 +1622,8 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" [[package]] name = "nipper" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "576d0e437aa08b447a207584463febe639d00b26b63121a9c038eff8371e0050" +version = "0.1.9" +source = "git+https://github.com/thedodd/nipper.git?branch=iter-attrs#3e6d4c7ffccf55b9caf5f2932c539dae965eff70" dependencies = [ "cssparser", "html5ever", @@ -1683,9 +1705,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "open" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e288ead50d896dde82f3c18b64e40a2bf3e941225aa5c6b35a3e8e7b6b21d6f" +checksum = "b2033f93630dd4b04768ecf5e16bcd3002a89e1e1dbef375bf290dd67e2b7a4d" dependencies = [ "which", "winapi 0.3.9", @@ -1813,9 +1835,9 @@ checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" [[package]] name = "pin-project-lite" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cf491442e4b033ed1c722cb9f0df5fcfcf4de682466c46469c36bc47dc5548a" +checksum = "dc0e1f259c92177c30a4c9d177246edd0a3568b25756a977d0632cf8fa37e905" [[package]] name = "pin-utils" @@ -2276,6 +2298,19 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "sha-1" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "dfebf75d25bd900fd1e7d11501efab59bc846dbc76196839663e6637bba9f25f" +dependencies = [ + "block-buffer", + "cfg-if 1.0.0", + "cpuid-bool 0.1.2", + "digest", + "opaque-debug", +] + [[package]] name = "sha1" version = "0.6.0" @@ -2513,7 +2548,7 @@ dependencies = [ "log", "mime_guess", "once_cell", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "serde", "serde_json", "web-sys", @@ -2618,12 +2653,29 @@ dependencies = [ "http-types", "kv-log-macro", "log", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "route-recognizer", "serde", "serde_json", ] +[[package]] +name = "tide-websockets" +version = "0.2.0" +source = "git+https://github.com/http-rs/tide-websockets.git?rev=270f408cdf4e5ee2bd28c7f5fcb57e5085d49ead#270f408cdf4e5ee2bd28c7f5fcb57e5085d49ead" +dependencies = [ + "async-dup", + "async-std", + "async-tungstenite", + "base64 0.13.0", + "futures-util", + "pin-project", + "serde", + "serde_json", + "sha-1", + "tide", +] + [[package]] name = "time" version = "0.1.44" @@ -2705,7 +2757,7 @@ checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" dependencies = [ "cfg-if 1.0.0", "log", - "pin-project-lite 0.2.5", + "pin-project-lite 0.2.6", "tracing-attributes", "tracing-core", ] @@ -2742,11 +2794,12 @@ dependencies = [ [[package]] name = "trunk" -version = "0.8.3" +version = "0.9.0" dependencies = [ "anyhow", "async-process", "async-std", + "async-tungstenite", "cargo_metadata", "console 0.14.0", "dunce", @@ -2767,9 +2820,29 @@ dependencies = [ "structopt-derive", "surf", "tide", + "tide-websockets", "toml", ] +[[package]] +name = "tungstenite" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0308d80d86700c5878b9ef6321f020f29b1bb9d5ff3cab25e75e23f3a492a23" +dependencies = [ + "base64 0.12.3", + "byteorder", + "bytes 0.5.6", + "http", + "httparse", + "input_buffer", + "log", + "rand 0.7.3", + "sha-1", + "url", + "utf-8", +] + [[package]] name = "typenum" version = "1.12.0" 
diff --git a/Cargo.toml b/Cargo.toml index c2c84a23..930a902a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "trunk" -version = "0.8.3" +version = "0.9.0" edition = "2018" description = "Build, bundle & ship your Rust WASM application to the web." license = "MIT/Apache-2.0" @@ -18,7 +18,8 @@ panic = "abort" [dependencies] anyhow = "1" async-process = "1" -async-std = { version="1.9", features=["attributes", "unstable"] } +async-std = { version = "1.9", default-features = false, features = ["attributes", "std", "unstable"] } +async-tungstenite = { version = "0.10.0", default-features = false, features = ["async-std-runtime"] } cargo_metadata = "0.12" console = "0.14" dunce = "1" @@ -27,18 +28,19 @@ fs_extra = "1" futures = "0.3" http-types = "2" indicatif = "0.15" -nipper = "0.1" +nipper = { git = "https://github.com/thedodd/nipper.git", branch = "iter-attrs" } notify = "4" open = "1" +remove_dir_all = "0.6" sass-rs = "0.2.2" seahash = "4" -serde = { version="1", features=["derive"] } +serde = { version = "1", features = ["derive"] } structopt = "0.3" structopt-derive = "0.4" surf = "2" -tide = { version="0.16.0", features=["unstable"] } +tide = { version = "0.16.0", features = ["unstable"] } +tide-websockets = { git = "https://github.com/http-rs/tide-websockets.git", rev = "270f408cdf4e5ee2bd28c7f5fcb57e5085d49ead" } toml = "0.5" -remove_dir_all = "0.6" [dev-dependencies] insta = "0.16.1" diff --git a/README.md b/README.md index 50e5d641..da18e219 100644 --- a/README.md +++ b/README.md @@ -161,8 +161,10 @@ The `trunk serve` command accepts two proxy related flags. `--proxy-rewrite` specifies an alternative URI on which the Trunk server is to listen for proxy requests. Any requests received on the given URI will be rewritten to match the URI of the proxy backend, effectively stripping the rewrite prefix. 
E.G., `trunk serve --proxy-backend=http://localhost:9000/ --proxy-rewrite=/api/` will proxy any requests received on `/api/` over to `http://localhost:9000/` with the `/api/` prefix stripped from the request, while everything following the `/api/` prefix will be left unchanged. +`--proxy-ws` specifies that the proxy is for a WebSocket endpoint. + ### config file -The `Trunk.toml` config file accepts multiple `[[proxy]]` sections, which allows for multiple proxies to be configured. Each section requires at least the `backend` field, and optionally accepts the `rewrite` field, both corresponding to the `--proxy-*` CLI flags discussed above. +The `Trunk.toml` config file accepts multiple `[[proxy]]` sections, which allows for multiple proxies to be configured. Each section requires at least the `backend` field, and optionally accepts the `rewrite` and `ws` fields, corresponding to the `--proxy-*` CLI flags discussed above. As it is with other Trunk config, a proxy declared via CLI will take final precedence and will cause any config file proxies to be ignored, even if there are multiple proxies declared in the config file. diff --git a/Trunk.toml b/Trunk.toml index d4d9787c..7b446c65 100644 --- a/Trunk.toml +++ b/Trunk.toml @@ -32,6 +32,12 @@ cargo = false # Proxies are optional, and default to `None`. # Proxies are only run as part of the `trunk serve` command. +[[proxy]] +# This WebSocket proxy example has a backend and ws field. This example will listen for +# WebSocket connections at `/api/ws` and proxy them to `ws://localhost:9000/api/ws`. +backend = "ws://localhost:9000/api/ws" +ws = true + [[proxy]] # This proxy example has a backend and a rewrite field. Requests received on `rewrite` will be # proxied to the backend after rewriting the `rewrite` prefix to the `backend`'s URI prefix. 
diff --git a/site/content/configuration.md b/site/content/configuration.md index 4850cccd..1967bbf3 100644 --- a/site/content/configuration.md +++ b/site/content/configuration.md @@ -27,8 +27,10 @@ The `trunk serve` command accepts two proxy related flags. `--proxy-rewrite` specifies an alternative URI on which the Trunk server is to listen for proxy requests. Any requests received on the given URI will be rewritten to match the URI of the proxy backend, effectively stripping the rewrite prefix. E.G., `trunk serve --proxy-backend=http://localhost:9000/ --proxy-rewrite=/api/` will proxy any requests received on `/api/` over to `http://localhost:9000/` with the `/api/` prefix stripped from the request, while everything following the `/api/` prefix will be left unchanged. +`--proxy-ws` specifies that the proxy is for a WebSocket endpoint. + ## Config File -The `Trunk.toml` config file accepts multiple `[[proxy]]` sections, which allows for multiple proxies to be configured. Each section requires at least the `backend` field, and optionally accepts the `rewrite` field, both corresponding to the `--proxy-*` CLI flags discussed above. +The `Trunk.toml` config file accepts multiple `[[proxy]]` sections, which allows for multiple proxies to be configured. Each section requires at least the `backend` field, and optionally accepts the `rewrite` and `ws` fields, both corresponding to the `--proxy-*` CLI flags discussed above. As it is with other Trunk config, a proxy declared via CLI will take final precedence and will cause any config file proxies to be ignored, even if there are multiple proxies declared in the config file. 
diff --git a/src/build.rs b/src/build.rs index d3d57e6b..90ba6a3d 100644 --- a/src/build.rs +++ b/src/build.rs @@ -5,7 +5,6 @@ use std::sync::Arc; use anyhow::{Context, Result}; use async_std::fs; -use async_std::path::Path; use futures::channel::mpsc::Sender; use futures::stream::StreamExt; use indicatif::ProgressBar; @@ -77,7 +76,7 @@ impl BuildSystem { // Spawn the source HTML pipeline. This will spawn all other pipelines derived from // the source HTML, and will ultimately generate and write the final HTML. - self.html_pipeline.clone().spawn().await?; + self.html_pipeline.clone().spawn().await.context("error HTML pipeline")?; // Move distrbution from staging dist to final dist self.finalize_dist().await.context("error applying built distribution")?; @@ -87,7 +86,7 @@ impl BuildSystem { /// Creates a "staging area" (dist/.stage) for storing intermediate build results. async fn prepare_staging_dist(&self) -> Result<()> { // Prepare staging area in which we will assemble the latest build - let staging_dist: &Path = self.cfg.staging_dist.as_path().into(); + let staging_dist = self.cfg.staging_dist.as_path(); // Clean staging area, if applicable remove_dir_all(staging_dist.into()).await.context("error cleaning staging dist dir")?; diff --git a/src/common.rs b/src/common.rs index d8a60772..2a14380d 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,9 +1,10 @@ //! Common functionality and types. -use std::path::PathBuf; +use std::io::ErrorKind; +use std::path::{Path, PathBuf}; use anyhow::{anyhow, Context, Result}; -use async_std::path::PathBuf as AsyncPathBuf; +use async_std::fs; use async_std::task::spawn_blocking; use console::Emoji; @@ -23,10 +24,11 @@ pub fn parse_public_url(val: &str) -> String { /// A utility function to recursively copy a directory. pub async fn copy_dir_recursive(from_dir: PathBuf, to_dir: PathBuf) -> Result<()> { - if !AsyncPathBuf::from(&from_dir).exists().await { + if !path_exists(&from_dir).await? 
{ return Err(anyhow!("directory can not be copied as it does not exist {:?}", &from_dir)); } - spawn_blocking(move || { + + spawn_blocking(move || -> Result<()> { let opts = fs_extra::dir::CopyOptions { overwrite: true, content_only: true, @@ -36,6 +38,7 @@ pub async fn copy_dir_recursive(from_dir: PathBuf, to_dir: PathBuf) -> Result<() Ok(()) }) .await + .context("error copying directory") } /// A utility function to recursively delete a directory. @@ -43,7 +46,7 @@ pub async fn copy_dir_recursive(from_dir: PathBuf, to_dir: PathBuf) -> Result<() /// Use this instead of fs::remove_dir_all(...) because of Windows compatibility issues, per /// advice of https://blog.qwaz.io/chat/issues-of-rusts-remove-dir-all-implementation-on-windows pub async fn remove_dir_all(from_dir: PathBuf) -> Result<()> { - if !AsyncPathBuf::from(&from_dir).exists().await { + if !path_exists(&from_dir).await? { return Ok(()); } spawn_blocking(move || { @@ -53,6 +56,16 @@ pub async fn remove_dir_all(from_dir: PathBuf) -> Result<()> { .await } +/// Checks if path exists. +pub async fn path_exists(path: impl AsRef) -> Result { + let exists = fs::metadata(path.as_ref()) + .await + .map(|_| true) + .or_else(|error| if error.kind() == ErrorKind::NotFound { Ok(false) } else { Err(error) }) + .with_context(|| format!("error checking for existence of path at {:?}", path.as_ref()))?; + Ok(exists) +} + /// Build system spinner. pub fn spinner() -> ProgressBar { let style = ProgressStyle::default_spinner().template("{spinner} {prefix} trunk | {wide_msg}"); diff --git a/src/config/mod.rs b/src/config/mod.rs index 4aa32ea3..e773934a 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -8,7 +8,8 @@ mod manifest; mod models; mod rt; -/// The default name of the directory where final build artifacts are placed after a successful build. +/// The default name of the directory where final build artifacts are +/// placed after a successful build. 
pub const DIST_DIR: &str = "dist"; /// The name of the directory used to stage build artifacts during an active build. pub const STAGE_DIR: &str = ".stage"; diff --git a/src/config/models.rs b/src/config/models.rs index 298007e2..14c679be 100644 --- a/src/config/models.rs +++ b/src/config/models.rs @@ -57,6 +57,10 @@ pub struct ConfigOptsServe { #[structopt(long = "proxy-rewrite")] #[serde(default)] pub proxy_rewrite: Option, + /// Configure the proxy for handling WebSockets [default: false] + #[structopt(long = "proxy-ws")] + #[serde(default)] + pub proxy_ws: bool, } /// Config options for the serve system. @@ -86,6 +90,9 @@ pub struct ConfigOptsProxy { /// When a value is specified, requests received on this URI will have this URI segment replaced /// with the URI of the `backend`. pub rewrite: Option, + /// Configure the proxy for handling WebSockets. + #[serde(default)] + pub ws: bool, } /// A model of all potential configuration options for the Trunk CLI system. @@ -182,6 +189,7 @@ impl ConfigOpts { open: cli.open, proxy_backend: cli.proxy_backend, proxy_rewrite: cli.proxy_rewrite, + proxy_ws: cli.proxy_ws, }; let cfg = ConfigOpts { build: None, @@ -317,6 +325,7 @@ impl ConfigOpts { g.proxy_backend = g.proxy_backend.or(l.proxy_backend); g.proxy_rewrite = g.proxy_rewrite.or(l.proxy_rewrite); g.port = g.port.or(l.port); + g.proxy_ws = g.proxy_ws || l.proxy_ws; // NOTE: this can not be disabled in the cascade. if l.open { g.open = true diff --git a/src/config/rt.rs b/src/config/rt.rs index 0e7e1683..a28f2f55 100644 --- a/src/config/rt.rs +++ b/src/config/rt.rs @@ -115,6 +115,8 @@ pub struct RtcServe { pub proxy_backend: Option, /// The URI on which to accept requests which are to be rewritten and proxied to backend. pub proxy_rewrite: Option, + /// Configure the proxy for handling WebSockets. + pub proxy_ws: bool, /// Any proxies configured to run along with the server. 
pub proxies: Option>, } @@ -130,6 +132,7 @@ impl RtcServe { open: opts.open, proxy_backend: opts.proxy_backend, proxy_rewrite: opts.proxy_rewrite, + proxy_ws: opts.proxy_ws, proxies, }) } diff --git a/src/pipelines/copydir.rs b/src/pipelines/copydir.rs index ec8dc5b0..26ac1cd6 100644 --- a/src/pipelines/copydir.rs +++ b/src/pipelines/copydir.rs @@ -4,12 +4,13 @@ use std::path::PathBuf; use std::sync::Arc; use anyhow::{anyhow, Context, Result}; +use async_std::fs; use async_std::task::{spawn, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; -use super::TrunkLinkPipelineOutput; use super::ATTR_HREF; +use super::{LinkAttrs, TrunkLinkPipelineOutput}; use crate::common::copy_dir_recursive; use crate::config::RtcBuild; @@ -28,13 +29,13 @@ pub struct CopyDir { impl CopyDir { pub const TYPE_COPY_DIR: &'static str = "copy-dir"; - pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { + pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { // Build the path to the target asset. 
- let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); if !path.is_absolute() { path = html_dir.join(path); } @@ -45,8 +46,7 @@ impl CopyDir { pub fn spawn(self) -> JoinHandle> { spawn(async move { self.progress.set_message("copying directory"); - let canonical_path = async_std::path::Path::new(&self.path) - .canonicalize() + let canonical_path = fs::canonicalize(&self.path) .await .with_context(|| format!("error taking canonical path of directory {:?}", &self.path))?; let dir_name = canonical_path diff --git a/src/pipelines/copyfile.rs b/src/pipelines/copyfile.rs index b92065bd..bc1d4e27 100644 --- a/src/pipelines/copyfile.rs +++ b/src/pipelines/copyfile.rs @@ -6,10 +6,10 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use async_std::task::{spawn, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; use super::ATTR_HREF; -use super::{AssetFile, TrunkLinkPipelineOutput}; +use super::{AssetFile, LinkAttrs, TrunkLinkPipelineOutput}; use crate::config::RtcBuild; /// A CopyFile asset pipeline. @@ -27,13 +27,13 @@ pub struct CopyFile { impl CopyFile { pub const TYPE_COPY_FILE: &'static str = "copy-file"; - pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { + pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { // Build the path to the target asset. 
- let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); let asset = AssetFile::new(&html_dir, path).await?; Ok(Self { id, cfg, progress, asset }) } diff --git a/src/pipelines/css.rs b/src/pipelines/css.rs index 4c3a63b5..28f26e86 100644 --- a/src/pipelines/css.rs +++ b/src/pipelines/css.rs @@ -6,10 +6,10 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use async_std::task::{spawn, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; use super::ATTR_HREF; -use super::{AssetFile, HashedFileOutput, TrunkLinkPipelineOutput}; +use super::{AssetFile, HashedFileOutput, LinkAttrs, TrunkLinkPipelineOutput}; use crate::config::RtcBuild; /// A CSS asset pipeline. @@ -27,13 +27,13 @@ pub struct Css { impl Css { pub const TYPE_CSS: &'static str = "css"; - pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { + pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { // Build the path to the target asset. 
- let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; + let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); let asset = AssetFile::new(&html_dir, path).await?; Ok(Self { id, cfg, progress, asset }) } diff --git a/src/pipelines/html.rs b/src/pipelines/html.rs index 4a0209a4..d27f027f 100644 --- a/src/pipelines/html.rs +++ b/src/pipelines/html.rs @@ -13,7 +13,7 @@ use nipper::Document; use crate::config::RtcBuild; use crate::pipelines::rust_app::RustApp; -use crate::pipelines::{TrunkLink, TrunkLinkPipelineOutput, TRUNK_ID}; +use crate::pipelines::{LinkAttrs, TrunkLink, TrunkLinkPipelineOutput, TRUNK_ID}; const PUBLIC_URL_MARKER_ATTR: &str = "data-trunk-public-url"; @@ -71,14 +71,23 @@ impl HtmlPipeline { // Iterator over all `link[data-trunk]` elements, assigning IDs & building pipelines. let mut assets = vec![]; - for (id, mut link) in target_html.select(r#"link[data-trunk]"#).iter().enumerate() { + let links = target_html.select(r#"link[data-trunk]"#); + for (id, link) in links.nodes().iter().enumerate() { + // Set the link's Trunk ID & accumulate all attrs. The main reason we collect this as + // raw data instead of passing around the link itself is so that we are not + // constrained by `!Send` types. link.set_attr(TRUNK_ID, &id.to_string()); + let attrs = link.attrs().into_iter().fold(LinkAttrs::new(), |mut acc, attr| { + acc.insert(attr.name.local.as_ref().to_string(), attr.value.to_string()); + acc + }); + let asset = TrunkLink::from_html( self.cfg.clone(), self.progress.clone(), self.target_html_dir.clone(), self.ignore_chan.clone(), - link, + attrs, id, ) .await?; @@ -87,7 +96,7 @@ impl HtmlPipeline { // Ensure we have a Rust app pipeline to spawn. 
let rust_app_nodes = target_html.select(r#"link[data-trunk][rel="rust"]"#).length(); - ensure!(rust_app_nodes <= 1, r#"only one link may be specified"#); + ensure!(rust_app_nodes <= 1, r#"only one <link data-trunk rel="rust" .../> may be specified"#); if rust_app_nodes == 0 { let app = RustApp::new_default( self.cfg.clone(), @@ -108,8 +117,8 @@ impl HtmlPipeline { self.finalize_html(&mut target_html); // Assemble a new output index.html file. - let output_html = target_html.html(); // TODO: prettify this output. - fs::write(self.cfg.staging_dist.join("index.html"), output_html.as_bytes()) + let output_html = target_html.html().to_string(); // TODO: prettify this output. + fs::write(self.cfg.staging_dist.join("index.html"), &output_html) .await .context("error writing finalized HTML output")?; @@ -119,7 +128,7 @@ impl HtmlPipeline { /// Finalize asset pipelines & prep the DOM for final output. async fn finalize_asset_pipelines(&self, target_html: &mut Document, mut pipelines: AssetPipelineHandles) -> Result<()> { while let Some(asset_res) = pipelines.next().await { - let asset = asset_res?; + let asset = asset_res.context("failed to spawn assets finalization")?; asset.finalize(target_html).await?; } Ok(()) } diff --git a/src/pipelines/icon.rs b/src/pipelines/icon.rs index ebeb936d..b4ddd7bf 100644 --- a/src/pipelines/icon.rs +++ b/src/pipelines/icon.rs @@ -6,10 +6,10 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use async_std::task::{spawn, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; use super::ATTR_HREF; -use super::{AssetFile, HashedFileOutput, TrunkLinkPipelineOutput}; +use super::{AssetFile, HashedFileOutput, LinkAttrs, TrunkLinkPipelineOutput}; use crate::config::RtcBuild; /// An Icon asset pipeline. 
@@ -27,13 +27,13 @@ pub struct Icon { impl Icon { pub const TYPE_ICON: &'static str = "icon"; - pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { + pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { // Build the path to the target asset. - let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); let asset = AssetFile::new(&html_dir, path).await?; Ok(Self { id, cfg, progress, asset }) } diff --git a/src/pipelines/inline.rs b/src/pipelines/inline.rs index e24ab285..8c00dfb9 100644 --- a/src/pipelines/inline.rs +++ b/src/pipelines/inline.rs @@ -7,9 +7,9 @@ use std::sync::Arc; use anyhow::{anyhow, bail, Result}; use async_std::task::{spawn, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; -use super::{AssetFile, TrunkLinkPipelineOutput, ATTR_HREF, ATTR_TYPE}; +use super::{AssetFile, LinkAttrs, TrunkLinkPipelineOutput, ATTR_HREF, ATTR_TYPE}; /// An Inline asset pipeline. 
pub struct Inline { @@ -27,16 +27,16 @@ pub struct Inline { impl Inline { pub const TYPE_INLINE: &'static str = "inline"; - pub async fn new(progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { - let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + pub async fn new(progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); let asset = AssetFile::new(&html_dir, path).await?; - let content_type = ContentType::from_attr_or_ext(el.attr(ATTR_TYPE), &asset.ext)?; + let content_type = ContentType::from_attr_or_ext(attrs.get(ATTR_TYPE), &asset.ext)?; Ok(Self { id, diff --git a/src/pipelines/mod.rs b/src/pipelines/mod.rs index b9127ce2..c26d1fef 100644 --- a/src/pipelines/mod.rs +++ b/src/pipelines/mod.rs @@ -8,6 +8,7 @@ mod rust_app; mod rust_worker; mod sass; +use std::collections::HashMap; use std::ffi::OsString; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -17,7 +18,7 @@ use async_std::fs; use async_std::task::JoinHandle; use futures::channel::mpsc::Sender; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; use crate::config::RtcBuild; use crate::pipelines::copydir::{CopyDir, CopyDirOutput}; @@ -37,6 +38,9 @@ const ATTR_REL: &str = "rel"; const SNIPPETS_DIR: &str = "snippets"; const TRUNK_ID: &str = "data-trunk-id"; +/// A mapping of all attrs associated with a specific `` element. +pub type LinkAttrs = HashMap; + /// A model of all of the supported Trunk asset links expressed in the source HTML as /// `` elements. /// @@ -58,23 +62,23 @@ pub enum TrunkLink { impl TrunkLink { /// Construct a new instance. 
pub async fn from_html( - cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, el: Selection<'_>, id: usize, + cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, attrs: LinkAttrs, id: usize, ) -> Result { - let rel = el - .attr(ATTR_REL) + let rel = attrs + .get(ATTR_REL) .ok_or_else(|| anyhow!("all elements must have a `rel` attribute indicating the asset type"))?; - Ok(match rel.as_ref() { - Sass::TYPE_SASS | Sass::TYPE_SCSS => Self::Sass(Sass::new(cfg, progress, html_dir, el, id).await?), - Icon::TYPE_ICON => Self::Icon(Icon::new(cfg, progress, html_dir, el, id).await?), - Inline::TYPE_INLINE => Self::Inline(Inline::new(progress, html_dir, el, id).await?), - Css::TYPE_CSS => Self::Css(Css::new(cfg, progress, html_dir, el, id).await?), - CopyFile::TYPE_COPY_FILE => Self::CopyFile(CopyFile::new(cfg, progress, html_dir, el, id).await?), - CopyDir::TYPE_COPY_DIR => Self::CopyDir(CopyDir::new(cfg, progress, html_dir, el, id).await?), - RustApp::TYPE_RUST_APP => Self::RustApp(RustApp::new(cfg, progress, html_dir, ignore_chan, el, id).await?), - RustWorker::TYPE_RUST_WORKER => Self::RustWorker(RustWorker::new(cfg, progress, html_dir, ignore_chan, el, id).await?), + Ok(match rel.as_str() { + Sass::TYPE_SASS | Sass::TYPE_SCSS => Self::Sass(Sass::new(cfg, progress, html_dir, attrs, id).await?), + Icon::TYPE_ICON => Self::Icon(Icon::new(cfg, progress, html_dir, attrs, id).await?), + Inline::TYPE_INLINE => Self::Inline(Inline::new(progress, html_dir, attrs, id).await?), + Css::TYPE_CSS => Self::Css(Css::new(cfg, progress, html_dir, attrs, id).await?), + CopyFile::TYPE_COPY_FILE => Self::CopyFile(CopyFile::new(cfg, progress, html_dir, attrs, id).await?), + CopyDir::TYPE_COPY_DIR => Self::CopyDir(CopyDir::new(cfg, progress, html_dir, attrs, id).await?), + RustApp::TYPE_RUST_APP => Self::RustApp(RustApp::new(cfg, progress, html_dir, ignore_chan, attrs, id).await?), + RustWorker::TYPE_RUST_WORKER => Self::RustWorker(RustWorker::new(cfg, 
progress, html_dir, ignore_chan, attrs, id).await?), _ => bail!( r#"unknown attr value `rel="{}"`; please ensure the value is lowercase and is a supported asset type"#, - rel.as_ref() + rel ), }) } @@ -207,7 +211,7 @@ impl AssetFile { /// Read the content of this asset to a String. pub async fn read_to_string(&self) -> Result { - async_std::fs::read_to_string(&self.path) + fs::read_to_string(&self.path) .await .with_context(|| format!("error reading file {:?} to string", self.path)) } diff --git a/src/pipelines/rust_app.rs b/src/pipelines/rust_app.rs index 126e11fd..4ba102a6 100644 --- a/src/pipelines/rust_app.rs +++ b/src/pipelines/rust_app.rs @@ -7,15 +7,14 @@ use std::sync::Arc; use anyhow::{anyhow, ensure, Context, Result}; use async_process::{Command, Stdio}; use async_std::fs; -use async_std::path::Path; use async_std::task::{spawn, JoinHandle}; use futures::channel::mpsc::Sender; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; -use super::TrunkLinkPipelineOutput; +use super::{LinkAttrs, TrunkLinkPipelineOutput}; use super::{ATTR_HREF, SNIPPETS_DIR}; -use crate::common::copy_dir_recursive; +use crate::common::{copy_dir_recursive, path_exists}; use crate::config::{CargoMetadata, RtcBuild}; /// A Rust application pipeline. @@ -39,14 +38,14 @@ impl RustApp { pub const TYPE_RUST_APP: &'static str = "rust"; pub async fn new( - cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, el: Selection<'_>, id: usize, + cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, attrs: LinkAttrs, id: usize, ) -> Result { // Build the path to the target asset. 
- let manifest_href = el - .attr(ATTR_HREF) - .map(|tendril| { + let manifest_href = attrs + .get(ATTR_HREF) + .map(|attr| { let mut path = PathBuf::new(); - path.extend(tendril.as_ref().split('/')); + path.extend(attr.split('/')); if !path.is_absolute() { path = html_dir.join(path); } @@ -56,7 +55,7 @@ impl RustApp { path }) .unwrap_or_else(|| html_dir.join("Cargo.toml")); - let bin = el.attr("data-bin").map(|val| val.to_string()); + let bin = attrs.get("data-bin").map(|val| val.to_string()); let manifest = CargoMetadata::new(&manifest_href).await?; let id = Some(id); @@ -172,7 +171,7 @@ impl RustApp { // Hash the built wasm app, then use that as the out-name param. self.progress.set_message("processing WASM"); - let wasm_bytes = async_std::fs::read(&wasm).await.context("error reading wasm file for hash generation")?; + let wasm_bytes = fs::read(&wasm).await.context("error reading wasm file for hash generation")?; let hashed_name = format!("index-{:x}", seahash::hash(&wasm_bytes)); Ok((wasm, hashed_name)) } @@ -228,7 +227,7 @@ impl RustApp { // Check for any snippets, and copy them over. let snippets_dir = bindgen_out.join(SNIPPETS_DIR); - if Path::new(&snippets_dir).exists().await { + if path_exists(&snippets_dir).await? { copy_dir_recursive(bindgen_out.join(SNIPPETS_DIR), self.cfg.staging_dist.join(SNIPPETS_DIR)) .await .context("error copying snippets dir to stage dir")?; diff --git a/src/pipelines/rust_worker.rs b/src/pipelines/rust_worker.rs index 5fa35224..c54967c3 100644 --- a/src/pipelines/rust_worker.rs +++ b/src/pipelines/rust_worker.rs @@ -9,9 +9,9 @@ use anyhow::{bail, Result}; use async_std::task::JoinHandle; use futures::channel::mpsc::Sender; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; -use super::TrunkLinkPipelineOutput; +use super::{LinkAttrs, TrunkLinkPipelineOutput}; use crate::config::{CargoMetadata, RtcBuild}; /// A Rust web worker pipeline. 
@@ -32,7 +32,7 @@ impl RustWorker { pub const TYPE_RUST_WORKER: &'static str = "rust-worker"; pub async fn new( - cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, el: Selection<'_>, id: usize, + cfg: Arc, progress: ProgressBar, html_dir: Arc, ignore_chan: Option>, attrs: LinkAttrs, id: usize, ) -> Result { bail!(r#"the rust web worker asset type `` is not yet supported"#) } diff --git a/src/pipelines/sass.rs b/src/pipelines/sass.rs index 17e382c4..d935ae0f 100644 --- a/src/pipelines/sass.rs +++ b/src/pipelines/sass.rs @@ -7,10 +7,10 @@ use anyhow::{anyhow, Context, Result}; use async_std::fs; use async_std::task::{spawn, spawn_blocking, JoinHandle}; use indicatif::ProgressBar; -use nipper::{Document, Selection}; +use nipper::Document; use super::ATTR_HREF; -use super::{AssetFile, HashedFileOutput, TrunkLinkPipelineOutput}; +use super::{AssetFile, HashedFileOutput, LinkAttrs, TrunkLinkPipelineOutput}; use crate::config::RtcBuild; /// A sass/scss asset pipeline. @@ -29,13 +29,13 @@ impl Sass { pub const TYPE_SASS: &'static str = "sass"; pub const TYPE_SCSS: &'static str = "scss"; - pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, el: Selection<'_>, id: usize) -> Result { + pub async fn new(cfg: Arc, progress: ProgressBar, html_dir: Arc, attrs: LinkAttrs, id: usize) -> Result { // Build the path to the target asset. 
- let href_attr = el - .attr(ATTR_HREF) - .ok_or_else(|| anyhow!("required attr `href` missing for element: {}", el.html()))?; + let href_attr = attrs + .get(ATTR_HREF) + .ok_or_else(|| anyhow!(r#"required attr `href` missing for element"#))?; let mut path = PathBuf::new(); - path.extend(href_attr.as_ref().split('/')); + path.extend(href_attr.split('/')); let asset = AssetFile::new(&html_dir, path).await?; Ok(Self { id, cfg, progress, asset }) } diff --git a/src/proxy.rs b/src/proxy.rs index ed245a31..92f184cd 100644 --- a/src/proxy.rs +++ b/src/proxy.rs @@ -1,8 +1,36 @@ -use http_types::Url; -use tide::{Request, Result}; +use std::sync::Arc; + +use anyhow::Context; +use async_std::task::spawn; +use async_tungstenite::async_std::connect_async; +use futures::prelude::*; +use http_types::{Method, Url}; +use tide::{Request, Result, Server}; +use tide_websockets::{WebSocket, WebSocketConnection}; use crate::serve::State; +/// All HTTP methods, used for registering proxy endpoints with proper precedence. +static HTTP_METHODS: [Method; 9] = [ + Method::Get, + Method::Head, + Method::Post, + Method::Put, + Method::Delete, + Method::Connect, + Method::Options, + Method::Trace, + Method::Patch, +]; + +/// Proxy handler functionality. +pub trait ProxyHandler { + /// The path on which this proxy handler is to listen. + fn path(&self) -> &str; + /// Register this proxy handler on the given app. + fn register(self: Arc, app: &mut Server); +} + /// A handler used for proxying HTTP requests to a backend. pub struct ProxyHandlerHttp { /// The URL of the backend to which requests are to be proxied. 
@@ -12,27 +40,45 @@ pub struct ProxyHandlerHttp { rewrite: Option, } +impl ProxyHandler for ProxyHandlerHttp { + fn path(&self) -> &str { + self.rewrite.as_ref().map(AsRef::as_ref).unwrap_or_else(|| self.backend.path()) + } + + fn register(self: Arc, app: &mut Server) { + // NOTE: we are using this loop instead of `.any` due to precedence issues in registering + // routes, as described here https://github.com/thedodd/trunk/issues/95#issuecomment-753508639 + for method in HTTP_METHODS.iter() { + let handler = self.clone(); + app.at(handler.path()).strip_prefix().method(*method, move |req: Request| { + let handler = handler.clone(); + async move { handler.proxy_request(req).await } + }); + } + } +} + impl ProxyHandlerHttp { /// Create a new instance. pub fn new(backend: Url, rewrite: Option) -> Self { Self { backend, rewrite } } - /// The path on which this proxy handler is to listen. - pub fn path(&self) -> &str { - self.rewrite.as_ref().map(AsRef::as_ref).unwrap_or_else(|| self.backend.path()) - } - /// Proxy the given request to the target backend. - pub async fn proxy_request(&self, mut req: Request) -> Result { - // Build a new request to be sent to the proxy backend. + async fn proxy_request(&self, mut req: Request) -> Result { + // Prep the backend URL for proxied request. let req_url = req.url(); let req_path = req_url.path(); let mut url = self.backend.clone(); if let Ok(mut segments) = url.path_segments_mut() { - segments.pop_if_empty().extend(req_path.trim_start_matches('/').split('/')); + // Don't extend if empty. + if req_path != "/" { + segments.pop_if_empty().extend(req_path.trim_start_matches('/').split('/')); + } } url.set_query(req_url.query()); + + // Build a new request to be sent to the proxy backend. 
let mut request = surf::RequestBuilder::new(req.method(), url).body(req.take_body()); for (hname, hval) in req.iter() { request = request.header(hname, hval); @@ -51,3 +97,86 @@ impl ProxyHandlerHttp { Ok(response.build()) } } + +/// A handler used for proxying WebSockets to a backend. +pub struct ProxyHandlerWebSocket { + /// The URL of the backend to which requests are to be proxied. + backend: Url, + /// An optional rewrite path to be used as the listening URI prefix, but which will be + /// stripped before being sent to the proxy backend. + rewrite: Option, + /// An HTTP handler used for proxying requests which are not actually WebSocket related. + http_handler: ProxyHandlerHttp, +} + +impl ProxyHandler for ProxyHandlerWebSocket { + fn path(&self) -> &str { + self.rewrite.as_ref().map(AsRef::as_ref).unwrap_or_else(|| self.backend.path()) + } + + fn register(self: Arc, app: &mut Server) { + let handler = self.clone(); + app.at(self.path()) + .strip_prefix() + .with(WebSocket::new(move |req, sock| self.clone().proxy_request(req, sock))) + .get(move |req| { + let handler = handler.clone(); + async move { handler.http_handler.proxy_request(req).await } + }); + } +} + +impl ProxyHandlerWebSocket { + /// Create a new instance. + pub fn new(backend: Url, rewrite: Option) -> Self { + let http_handler = ProxyHandlerHttp::new(backend.clone(), rewrite.clone()); + Self { + backend, + rewrite, + http_handler, + } + } + + /// Proxy the given request to the target backend. + async fn proxy_request(self: Arc, req: Request, frontend: WebSocketConnection) -> Result<()> { + // Prep the backend URL for opening the backend WebSocket connection. + let req_url = req.url(); + let req_path = req_url.path(); + let mut backend_url = self.backend.clone(); + if let Ok(mut segments) = backend_url.path_segments_mut() { + // Don't extend if empty. 
+ if req_path != "/" { + segments.pop_if_empty().extend(req_path.trim_start_matches('/').split('/')); + } + } + + // Open a WebSocket connection to the backend. + let (mut backend_sink, mut backend_source) = connect_async(&backend_url) + .await + .with_context(|| format!("error establishing WebSocket connection to {:?}", backend_url))? + .0 + .split(); + + // Spawn a task for processing frontend messages. + let mut frontend_source = frontend.clone(); + let frontend_handle = spawn(async move { + while let Some(Ok(msg)) = frontend_source.next().await { + if let Err(err) = backend_sink.send(msg).await { + eprintln!("error forwarding frontend WebSocket message to backend: {:?}", err); + } + } + }); + + // Spawn a task for processing backend messages. + let backend_handle = spawn(async move { + while let Some(Ok(msg)) = backend_source.next().await { + if let Err(err) = frontend.send(msg).await { + eprintln!("error forwarding backend WebSocket message to frontend: {:?}", err); + } + } + }); + + futures::join!(frontend_handle, backend_handle); + Ok(()) + } +} diff --git a/src/serve.rs b/src/serve.rs index db72fafb..e628e40d 100644 --- a/src/serve.rs +++ b/src/serve.rs @@ -10,7 +10,7 @@ use tide::{Middleware, Next, Request, Response, StatusCode}; use crate::common::SERVER; use crate::config::RtcServe; -use crate::proxy::ProxyHandlerHttp; +use crate::proxy::{ProxyHandler, ProxyHandlerHttp, ProxyHandlerWebSocket}; use crate::watch::WatchSystem; /// A system encapsulating a build & watch system, responsible for serving generated content. @@ -67,20 +67,22 @@ impl ServeSystem { // Build proxies. 
if let Some(backend) = &cfg.proxy_backend { - let handler = Arc::new(ProxyHandlerHttp::new(backend.clone(), cfg.proxy_rewrite.clone())); + let handler: Arc = if cfg.proxy_ws { + Arc::new(ProxyHandlerWebSocket::new(backend.clone(), cfg.proxy_rewrite.clone())) + } else { + Arc::new(ProxyHandlerHttp::new(backend.clone(), cfg.proxy_rewrite.clone())) + }; progress.println(format!("{} proxying {} -> {}\n", SERVER, handler.path(), &backend)); - app.at(handler.path()).strip_prefix().all(move |req| { - let handler = handler.clone(); - async move { handler.proxy_request(req).await } - }); + handler.register(&mut app); } else if let Some(proxies) = &cfg.proxies { for proxy in proxies.iter() { - let handler = Arc::new(ProxyHandlerHttp::new(proxy.backend.clone(), proxy.rewrite.clone())); + let handler: Arc = if proxy.ws { + Arc::new(ProxyHandlerWebSocket::new(proxy.backend.clone(), proxy.rewrite.clone())) + } else { + Arc::new(ProxyHandlerHttp::new(proxy.backend.clone(), proxy.rewrite.clone())) + }; progress.println(format!("{} proxying {} -> {}\n", SERVER, handler.path(), &proxy.backend)); - app.at(handler.path()).strip_prefix().all(move |req| { - let handler = handler.clone(); - async move { handler.proxy_request(req).await } - }); + handler.register(&mut app); } }