refactor: extract run_app function (#843)

* refactor: extract run_app function

* lint

* change route to /metrics

* refactor
release-fix-intellij-update-support-version-range
Meng Zhang 2023-11-19 17:00:35 -08:00 committed by GitHub
parent 0908eb1fb4
commit d8e83d0610
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 69 additions and 85 deletions

View File

@ -4,7 +4,7 @@
* Add distribution support (running completion / chat model on different process / machine).
* Add conversation history in chat playground.
* Add `/v1/metrics` endpoint for prometheus metrics collection.
* Add `/metrics` endpoint for prometheus metrics collection.
## Fixes and Improvements

View File

@ -26,7 +26,6 @@ bump-release-version:
update-openapi-doc:
curl http://localhost:8080/api-docs/openapi.json | jq ' \
delpaths([ \
["paths", "/v1/metrics"], \
["paths", "/v1beta/chat/completions"], \
["paths", "/v1beta/search"], \
["components", "schemas", "CompletionRequest", "properties", "prompt"], \

View File

@ -3,14 +3,6 @@ use std::sync::Arc;
use axum::extract::State;
use axum_prometheus::metrics_exporter_prometheus::PrometheusHandle;
#[utoipa::path(
get,
path = "/v1/metrics",
tag = "v1",
responses(
(status = 200, description = "Success", body = String, content_type = "text/plain"),
)
)]
/// Axum handler exposing the collected Prometheus metrics in the
/// plain-text exposition format.
pub async fn metrics(State(handle): State<Arc<PrometheusHandle>>) -> String {
    // `render` snapshots the recorder's current state into text output.
    handle.render()
}

View File

@ -1,13 +1,53 @@
mod metrics;
use std::{
net::{Ipv4Addr, SocketAddr},
sync::Arc,
};
use axum::{routing, Router};
use axum_prometheus::PrometheusMetricLayer;
use axum_tracing_opentelemetry::opentelemetry_tracing_layer;
use hyper::Server;
use tower_http::cors::CorsLayer;
use tracing::info;
use crate::fatal;
/// Assembles the HTTP application from the API router (plus an optional UI
/// router) and serves it on the given port until the process exits.
///
/// * `api` - API routes; the CORS / tracing / Prometheus layers wrap these.
/// * `ui` - optional UI routes merged into the same listener.
/// * `port` - TCP port bound on all IPv4 interfaces (0.0.0.0).
///
/// On a serve error this calls `fatal!`, so it never returns an error.
pub async fn run_app(api: Router, ui: Option<Router>, port: u16) {
    let (metrics_layer, metrics_handle) = PrometheusMetricLayer::pair();

    // Middleware layers wrap the API routes registered above them; the
    // `/metrics` route itself is added afterwards.
    let mut app = api
        .layer(CorsLayer::permissive())
        .layer(opentelemetry_tracing_layer())
        .layer(metrics_layer)
        .route(
            "/metrics",
            routing::get(metrics::metrics).with_state(Arc::new(metrics_handle)),
        );

    if let Some(ui_router) = ui {
        app = app.merge(ui_router);
    }

    let address = SocketAddr::from((Ipv4Addr::UNSPECIFIED, port));
    info!("Listening at {}", address);

    Server::bind(&address)
        .serve(app.into_make_service_with_connect_info::<SocketAddr>())
        .await
        .unwrap_or_else(|err| fatal!("Error happens during serving: {}", err))
}
mod chat;
mod completions;
mod events;
mod health;
mod metrics;
mod search;
pub use chat::*;
pub use completions::*;
pub use events::*;
pub use health::*;
pub use metrics::*;
pub use search::*;

View File

@ -1,12 +1,6 @@
use std::{
net::{Ipv4Addr, SocketAddr},
sync::Arc,
time::Duration,
};
use std::{sync::Arc, time::Duration};
use axum::{routing, Router, Server};
use axum_prometheus::{metrics_exporter_prometheus::PrometheusHandle, PrometheusMetricLayer};
use axum_tracing_opentelemetry::opentelemetry_tracing_layer;
use axum::{routing, Router};
use clap::Args;
use tabby_common::{
api,
@ -15,13 +9,13 @@ use tabby_common::{
usage,
};
use tokio::time::sleep;
use tower_http::{cors::CorsLayer, timeout::TimeoutLayer};
use tower_http::timeout::TimeoutLayer;
use tracing::info;
use utoipa::OpenApi;
use utoipa_swagger_ui::SwaggerUi;
use crate::{
fatal, routes,
routes::{self, run_app},
services::{
chat::{self, create_chat_service},
code::create_code_search,
@ -50,7 +44,7 @@ Install following IDE / Editor extensions to get started with [Tabby](https://gi
servers(
(url = "/", description = "Server"),
),
paths(routes::log_event, routes::completions, routes::completions, routes::health, routes::search, routes::metrics),
paths(routes::log_event, routes::completions, routes::completions, routes::health, routes::search),
components(schemas(
api::event::LogEventRequest,
completion::CompletionRequest,
@ -109,32 +103,19 @@ pub async fn main(config: &Config, args: &ServeArgs) {
let logger = Arc::new(create_logger());
let code = Arc::new(create_code_search());
let (prometheus_layer, prometheus_handle) = PrometheusMetricLayer::pair();
let metrics_handle = Arc::new(prometheus_handle);
let app = Router::new()
.merge(api_router(args, config, logger.clone(), code.clone(), metrics_handle).await)
let api = api_router(args, config, logger.clone(), code.clone()).await;
let ui = Router::new()
.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", ApiDoc::openapi()));
#[cfg(feature = "ee")]
let app = tabby_webserver::attach_webserver(app, logger, code).await;
let (api, ui) = tabby_webserver::attach_webserver(api, ui, logger, code).await;
#[cfg(not(feature = "ee"))]
let app = app.fallback(|| async { axum::response::Redirect::permanent("/swagger-ui") });
let app = app
.layer(CorsLayer::permissive())
.layer(opentelemetry_tracing_layer())
.layer(prometheus_layer);
let address = SocketAddr::from((Ipv4Addr::UNSPECIFIED, args.port));
info!("Listening at {}", address);
let ui = ui.fallback(|| async { axum::response::Redirect::permanent("/swagger-ui") });
start_heartbeat(args);
Server::bind(&address)
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.await
.unwrap_or_else(|err| fatal!("Error happens during serving: {}", err))
run_app(api, Some(ui), args.port).await
}
async fn load_model(args: &ServeArgs) {
@ -152,7 +133,6 @@ async fn api_router(
config: &Config,
logger: Arc<dyn EventLogger>,
code: Arc<dyn CodeSearch>,
metrics_handle: Arc<PrometheusHandle>,
) -> Router {
let completion_state = if let Some(model) = &args.model {
Some(Arc::new(
@ -199,10 +179,6 @@ async fn api_router(
"/v1/health",
routing::get(routes::health).with_state(health_state),
)
.route(
"/v1/metrics",
routing::get(routes::metrics).with_state(metrics_handle),
)
});
if let Some(completion_state) = completion_state {

View File

@ -1,21 +1,13 @@
use std::{
env::consts::ARCH,
net::{Ipv4Addr, SocketAddr},
sync::Arc,
};
use std::{env::consts::ARCH, sync::Arc};
use anyhow::Result;
use axum::{routing, Router};
use axum_prometheus::PrometheusMetricLayer;
use axum_tracing_opentelemetry::opentelemetry_tracing_layer;
use clap::Args;
use hyper::Server;
use tabby_webserver::api::{tracing_context, HubClient, WorkerKind};
use tower_http::cors::CorsLayer;
use tracing::{info, warn};
use crate::{
fatal, routes,
routes::{self, run_app},
services::{
chat::create_chat_service,
completion::create_completion_service,
@ -86,29 +78,12 @@ pub async fn main(kind: WorkerKind, args: &WorkerArgs) {
let context = WorkerContext::new(&args.url).await;
let (prometheus_layer, prometheus_handle) = PrometheusMetricLayer::pair();
let app = match kind {
WorkerKind::Completion => make_completion_route(context, args).await,
WorkerKind::Chat => make_chat_route(context, args).await,
};
let app = app
.route(
"/v1/metrics",
routing::get(routes::metrics).with_state(Arc::new(prometheus_handle)),
)
.layer(CorsLayer::permissive())
.layer(opentelemetry_tracing_layer())
.layer(prometheus_layer);
let address = SocketAddr::from((Ipv4Addr::UNSPECIFIED, args.port));
info!("Listening at {}", address);
Server::bind(&address)
.serve(app.into_make_service())
.await
.unwrap_or_else(|err| fatal!("Error happens during serving: {}", err))
run_app(app, None, args.port).await
}
struct WorkerContext {

View File

@ -31,28 +31,30 @@ use server::ServerContext;
use tarpc::server::{BaseChannel, Channel};
pub async fn attach_webserver(
router: Router,
api: Router,
ui: Router,
logger: Arc<dyn RawEventLogger>,
code: Arc<dyn CodeSearch>,
) -> Router {
) -> (Router, Router) {
let conn = db::DbConn::new().await.unwrap();
let ctx = Arc::new(ServerContext::new(conn, logger, code));
let schema = Arc::new(create_schema());
let app = Router::new()
.route("/graphql", routing::get(playground("/graphql", None)))
.route("/graphiql", routing::get(graphiql("/graphql", None)))
let api = api
.layer(from_fn_with_state(ctx.clone(), distributed_tabby_layer))
.route(
"/graphql",
routing::post(graphql::<Arc<Schema>>).with_state(ctx.clone()),
)
.layer(Extension(schema));
.layer(Extension(schema))
.route("/hub", routing::get(ws_handler).with_state(ctx.clone()));
router
.merge(app)
.route("/hub", routing::get(ws_handler).with_state(ctx.clone()))
.fallback(ui::handler)
.layer(from_fn_with_state(ctx, distributed_tabby_layer))
let ui = ui
.route("/graphql", routing::get(playground("/graphql", None)))
.route("/graphiql", routing::get(graphiql("/graphql", None)))
.fallback(ui::handler);
(api, ui)
}
async fn distributed_tabby_layer(