fix: bug in dynamic reloading of config files
This commit is contained in:
parent 3d60175c11
commit 7c2205f275

5 changed files with 44 additions and 12 deletions
@@ -36,6 +36,7 @@ pub async fn entrypoint<T>(
   proxy_config: &ProxyConfig,
   app_config_list: &AppConfigList<T>,
   runtime_handle: &tokio::runtime::Handle,
+  term_notify: Option<Arc<tokio::sync::Notify>>,
 ) -> Result<()>
 where
   T: CryptoSource + Clone + Send + Sync + 'static,
@@ -68,7 +69,7 @@ where
     runtime_handle: runtime_handle.clone(),
   });

   // TODO: HTTP2 only client is needed for http2 cleartext case
   // build message handler including a request forwarder
   let msg_handler = Arc::new(
     HttpMessageHandlerBuilder::default()
       .forwarder(Arc::new(Forwarder::new().await))
@@ -91,7 +92,7 @@ where
         .build()
         .unwrap();

-      globals.runtime_handle.spawn(proxy.start())
+      globals.runtime_handle.spawn(proxy.start(term_notify.clone()))
     }));

     // wait for all future

@@ -8,6 +8,7 @@ use std::{net::SocketAddr, sync::Arc};
 use tokio::{
   io::{AsyncRead, AsyncWrite},
   runtime::Handle,
+  sync::Notify,
   time::{timeout, Duration},
 };

@@ -123,7 +124,7 @@ where
   }

   /// Entrypoint for HTTP/1.1 and HTTP/2 servers
-  pub async fn start(self) -> Result<()> {
+  pub async fn start(self, term_notify: Option<Arc<Notify>>) -> Result<()> {
     let mut server = Http::new();
     server.http1_keep_alive(self.globals.proxy_config.keepalive);
     server.http2_max_concurrent_streams(self.globals.proxy_config.max_concurrent_streams);
@@ -131,12 +132,35 @@ where
     let executor = LocalExecutor::new(self.globals.runtime_handle.clone());
     let server = server.with_executor(executor);

-    if self.tls_enabled {
-      self.start_with_tls(server).await?;
-    } else {
-      self.start_without_tls(server).await?;
+    let listening_on = self.listening_on;
+
+    let proxy_service = async {
+      if self.tls_enabled {
+        self.start_with_tls(server).await
+      } else {
+        self.start_without_tls(server).await
+      }
+    };
+
+    match term_notify {
+      Some(term) => {
+        tokio::select! {
+          _ = proxy_service => {
+            warn!("Proxy service got down");
+          }
+          _ = term.notified() => {
+            info!("Proxy service listening on {} receives term signal", listening_on);
+          }
+        }
+      }
+      None => {
+        proxy_service.await?;
+        warn!("Proxy service got down");
+      }
     }

+    // proxy_service.await?;
+
     Ok(())
   }
 }
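
Note: the term_notify plumbing above races the proxy's serve future against a tokio::sync::Notify inside tokio::select!, which is what lets a caller that watches the config files stop the running listeners and start fresh ones with the reloaded configuration. Below is a minimal, self-contained sketch of that pattern; the names (serve_forever, the timed "watcher") are illustrative stand-ins, not rpxy's actual API.

// Cargo.toml (for this sketch): tokio = { version = "1", features = ["full"] }
use std::sync::Arc;
use tokio::sync::Notify;
use tokio::time::{sleep, Duration};

// Stand-in for the proxy's serve loop; it only returns if the service fails.
async fn serve_forever() {
  loop {
    sleep(Duration::from_secs(1)).await;
  }
}

#[tokio::main]
async fn main() {
  let term_notify = Arc::new(Notify::new());

  // Simulated config watcher: once a change is detected, ask the running
  // service to stop so it can be rebuilt with the reloaded configuration.
  let notifier = term_notify.clone();
  tokio::spawn(async move {
    sleep(Duration::from_secs(3)).await; // pretend the config file changed here
    notifier.notify_one(); // wakes the notified() branch below
  });

  // Same shape as the patched Proxy::start: whichever branch completes first
  // wins, so the notification cancels the still-pending service future.
  tokio::select! {
    _ = serve_forever() => println!("service stopped on its own"),
    _ = term_notify.notified() => println!("received term signal, restart with reloaded config"),
  }
}

The Option<Arc<Notify>> keeps the old behaviour available: with None the service future is simply awaited as before, so callers that never signal termination are unaffected.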