Tracing instrumentation #1270

Merged: 10 commits, Jul 2, 2024
14 changes: 13 additions & 1 deletion server/main-api/src/calendar/connectum.rs
@@ -1,5 +1,5 @@
use std::time::{Duration, Instant};
use std::{env, io};
use std::{env, fmt, io};

use chrono::{DateTime, Utc};
use oauth2::basic::{BasicClient, BasicTokenResponse};
@@ -16,6 +16,13 @@ pub(in crate::calendar) struct APIRequestor {
pool: PgPool,
oauth_token: Option<(Instant, BasicTokenResponse)>,
}
impl fmt::Debug for APIRequestor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("APIRequestor")
.field("oauth_token", &self.oauth_token.clone().map(|(i, _)| i))
.finish()
}
}

impl From<&PgPool> for APIRequestor {
fn from(pool: &PgPool) -> Self {
@@ -39,6 +46,7 @@ impl From<&PgPool> for APIRequestor {
}

impl APIRequestor {
#[tracing::instrument]
pub(crate) async fn refresh(&self, id: String) -> Result<(), crate::BoxedError> {
let sync_start = Utc::now();
let start = Instant::now();
@@ -75,6 +83,7 @@ impl APIRequestor {
}
true
}
#[tracing::instrument(ret(level = tracing::Level::TRACE))]
pub(crate) async fn try_refresh_token(&mut self) -> Result<String, crate::BoxedError> {
if self.should_refresh_token() {
self.oauth_token = Some(Self::fetch_new_oauth_token().await?);
@@ -146,6 +155,7 @@ impl APIRequestor {
Ok(())
}

#[tracing::instrument(ret(level = tracing::Level::TRACE))]
async fn fetch_new_oauth_token() -> Result<(Instant, BasicTokenResponse), crate::BoxedError> {
let client_id = env::var("CONNECTUM_OAUTH_CLIENT_ID")
.map_err(|e| {
@@ -176,6 +186,7 @@ impl APIRequestor {
.await;
Ok((Instant::now(), token?))
}
#[tracing::instrument(skip(tx))]
async fn delete_events(
&self,
tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
@@ -185,6 +196,7 @@ impl APIRequestor {
.execute(&mut **tx)
.await
}
#[tracing::instrument(skip(tx))]
async fn update_last_calendar_scrape_at(
&self,
tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
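
`#[tracing::instrument]` generates a span per call and records every non-skipped argument through its `Debug` impl. That is why `APIRequestor` gains a hand-written `Debug` above: it exposes only the `Instant` at which the OAuth token was acquired, never the token itself. A minimal sketch of the pattern (hypothetical `Requestor` type, not the crate's actual code):

```rust
use std::fmt;
use std::time::Instant;

struct Requestor {
    api_key: String, // secret: must never reach the logs
    token_acquired_at: Option<Instant>,
}

// Hand-written Debug so `#[tracing::instrument]` can record `self`
// without leaking `api_key`.
impl fmt::Debug for Requestor {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Requestor")
            .field("token_acquired_at", &self.token_acquired_at)
            .finish()
    }
}

#[tracing::instrument] // the span records `self` (redacted Debug) and `id`
async fn refresh(requestor: &Requestor, id: String) {
    tracing::info!("refreshing calendar entry {id}");
}
```
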
14 changes: 9 additions & 5 deletions server/main-api/src/calendar/mod.rs
@@ -7,6 +7,8 @@ use sqlx::PgPool;
use tracing::error;

use crate::calendar::models::{CalendarLocation, Event, LocationEvents};
use crate::limited::hash_map::LimitedHashMap;
use crate::limited::vec::LimitedVec;

mod connectum;
mod models;
@@ -49,7 +51,7 @@ pub async fn calendar_handler(
Err(e) => return e,
};
let locations = match get_locations(&data.db, &ids).await {
Ok(l) => l,
Ok(l) => l.0,
Err(e) => return e,
};
if let Err(e) = validate_locations(&ids, &locations) {
@@ -91,25 +93,27 @@ fn validate_locations(ids: &[String], locations: &[CalendarLocation]) -> Result<
Ok(())
}

#[tracing::instrument(skip(pool))]
async fn get_locations(
pool: &PgPool,
ids: &[String],
) -> Result<Vec<CalendarLocation>, HttpResponse> {
) -> Result<LimitedVec<CalendarLocation>, HttpResponse> {
match sqlx::query_as!(CalendarLocation, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name FROM de WHERE key = ANY($1::text[])", ids).fetch_all(pool).await {
Err(e) => {
error!("could not refetch due to {e:?}");
Err(HttpResponse::InternalServerError().body("could not get calendar entries, please try again later"))
}
Ok(locations) => Ok(locations),
Ok(locations) => Ok(LimitedVec(locations)),
}
}

#[tracing::instrument(skip(pool),ret(level = tracing::Level::TRACE))]
async fn get_from_db(
pool: &PgPool,
locations: &[CalendarLocation],
start_after: &DateTime<Utc>,
end_before: &DateTime<Utc>,
) -> Result<HashMap<String, LocationEvents>, crate::BoxedError> {
) -> Result<LimitedHashMap<String, LocationEvents>, crate::BoxedError> {
let mut located_events: HashMap<String, LocationEvents> = HashMap::new();
for location in locations {
let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
@@ -124,7 +128,7 @@ async fn get_from_db(
},
);
}
Ok(located_events)
Ok(LimitedHashMap(located_events))
}

#[cfg(test)]
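
`ret(level = tracing::Level::TRACE)` additionally records the function's return value (again via `Debug`) at TRACE level. Returning the `LimitedVec`/`LimitedHashMap` newtypes keeps that output bounded instead of dumping every database row into the log. A sketch of the idea, assuming a `LimitedVec` analogous to the `LimitedHashMap` added later in this PR (the real `crate::limited::vec::LimitedVec` is not shown in this diff):

```rust
use std::fmt;

/// Sketch: Debug prints at most 3 elements, then a trailing marker.
pub struct LimitedVec<T>(pub Vec<T>);

impl<T: fmt::Debug> fmt::Debug for LimitedVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.0.len() <= 3 {
            f.debug_list().entries(self.0.iter()).finish()
        } else {
            // The marker prints quoted ("...") in this sketch; the crate
            // presumably uses its OrMore helper for an unquoted version.
            f.debug_list()
                .entries(self.0.iter().take(3))
                .entry(&"...")
                .finish()
        }
    }
}

#[tracing::instrument(ret(level = tracing::Level::TRACE))]
fn fetch_ids() -> LimitedVec<u32> {
    // Recorded at TRACE as [0, 1, 2, "..."], not as 100 entries.
    LimitedVec((0..100).collect())
}
```
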
8 changes: 1 addition & 7 deletions server/main-api/src/calendar/models.rs
@@ -1,13 +1,7 @@
use crate::models::Location;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Serialize, Deserialize, Clone, Debug)]
pub(super) struct EventsCollection {
pub(super) events: HashMap<String, LocationEvents>,
pub(super) max_last_sync: DateTime<Utc>,
}
use crate::models::Location;

#[derive(Serialize, Deserialize, Clone, Debug)]
pub(super) struct CalendarLocation {
2 changes: 2 additions & 0 deletions server/main-api/src/calendar/refresh.rs
@@ -15,6 +15,8 @@ const NUMBER_OF_CONCURRENT_SCRAPES: usize = 3;
struct LocationKey {
key: String,
}

#[tracing::instrument(skip(pool))]
pub async fn all_entries(pool: &PgPool) {
if let Err(e) = std::env::var("CONNECTUM_OAUTH_CLIENT_ID") {
error!("Please make sure that CONNECTUM_OAUTH_CLIENT_ID are valid to use calendar features: {e:?}");
5 changes: 3 additions & 2 deletions server/main-api/src/details.rs
@@ -50,7 +50,8 @@ pub async fn get_handler(
}
}

async fn get_alias_and_redirect(conn: &PgPool, query: &str) -> Option<(String, String)> {
#[tracing::instrument(skip(pool))]
async fn get_alias_and_redirect(pool: &PgPool, query: &str) -> Option<(String, String)> {
let result = sqlx::query_as!(
LocationKeyAlias,
r#"
@@ -59,7 +60,7 @@ async fn get_alias_and_redirect(conn: &PgPool, query: &str) -> Option<(String, S
WHERE alias = $1 OR key = $1 "#,
query
)
.fetch_all(conn)
.fetch_all(pool)
.await;
match result {
Ok(d) => {
2 changes: 2 additions & 0 deletions server/main-api/src/feedback/github.rs
@@ -13,6 +13,7 @@ fn github_token() -> Result<String, ()> {
}
}

#[tracing::instrument]
pub async fn open_issue(title: &str, description: &str, labels: Vec<String>) -> HttpResponse {
let title = clean_feedback_data(title, 512);
let description = clean_feedback_data(description, 1024 * 1024);
@@ -56,6 +57,7 @@ pub async fn open_issue(title: &str, description: &str, labels: Vec<String>) ->
};
}

#[tracing::instrument]
pub async fn open_pr(
branch: String,
title: &str,
2 changes: 1 addition & 1 deletion server/main-api/src/feedback/proposed_edits/coordinate.rs
@@ -38,7 +38,7 @@ impl CoordinateFile {
}
}

#[derive(Deserialize, Clone, Default)]
#[derive(Deserialize, Debug, Clone, Copy, Default, PartialEq)]
pub struct Coordinate {
lat: f64,
lon: f64,
23 changes: 16 additions & 7 deletions server/main-api/src/feedback/proposed_edits/mod.rs
@@ -6,6 +6,8 @@ use actix_web::{post, HttpResponse};
use serde::Deserialize;
use tracing::error;

use crate::limited::hash_map::LimitedHashMap;

use super::github;
use super::proposed_edits::coordinate::Coordinate;
use super::proposed_edits::image::Image;
@@ -17,7 +19,7 @@ mod discription;
mod image;
mod tmp_repo;

#[derive(Deserialize, Clone)]
#[derive(Debug, Deserialize, Clone)]
struct Edit {
coordinate: Option<Coordinate>,
image: Option<Image>,
@@ -26,16 +28,17 @@ pub trait AppliableEdit {
fn apply(&self, key: &str, base_dir: &Path) -> String;
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
pub struct EditRequest {
token: String,
edits: HashMap<String, Edit>,
edits: LimitedHashMap<String, Edit>,
additional_context: String,
privacy_checked: bool,
}

const GIT_URL: &str = "[email protected]:TUM-Dev/NavigaTUM.git";
impl EditRequest {
#[tracing::instrument]
async fn apply_changes_and_generate_description(
&self,
branch_name: &str,
@@ -48,6 +51,7 @@ impl EditRequest {
}
fn edits_for<T: AppliableEdit>(&self, extractor: fn(Edit) -> Option<T>) -> HashMap<String, T> {
self.edits
.0
.clone()
.into_iter()
.filter_map(|(k, edit)| extractor(edit).map(|coord| (k, coord)))
@@ -57,10 +61,15 @@ fn extract_labels(&self) -> Vec<String> {
fn extract_labels(&self) -> Vec<String> {
let mut labels = vec!["webform".to_string()];

if self.edits.iter().any(|(_, edit)| edit.coordinate.is_none()) {
if self
.edits
.0
.iter()
.any(|(_, edit)| edit.coordinate.is_none())
{
labels.push("coordinate".to_string());
}
if self.edits.iter().any(|(_, edit)| edit.image.is_none()) {
if self.edits.0.iter().any(|(_, edit)| edit.image.is_none()) {
labels.push("image".to_string());
}
labels
@@ -100,12 +109,12 @@ pub async fn propose_edits(
.content_type("text/plain")
.body("Using this endpoint without accepting the privacy policy is not allowed");
};
if req_data.edits.is_empty() {
if req_data.edits.0.is_empty() {
return HttpResponse::UnprocessableEntity()
.content_type("text/plain")
.body("Not enough edits provided");
};
if req_data.edits.len() > 500 {
if req_data.edits.0.len() > 500 {
return HttpResponse::InsufficientStorage()
.content_type("text/plain")
.body("Too many edits provided");
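
Swapping `HashMap` for `LimitedHashMap` inside `EditRequest` is wire-compatible: serde treats a one-field tuple struct as transparent, so request payloads (de)serialize exactly as before and only the `Debug` output that `tracing` records is truncated. A quick sketch with a hypothetical JSON payload:

```rust
use std::collections::HashMap;

use serde::Deserialize;

// Sketch of the newtype; the real one adds a truncating Debug impl.
#[derive(Debug, Deserialize)]
struct LimitedHashMap<K: std::hash::Hash + Eq, V>(HashMap<K, V>);

#[derive(Debug, Deserialize)]
struct EditRequest {
    token: String,
    edits: LimitedHashMap<String, String>,
}

fn main() {
    // Same JSON shape as with a plain `HashMap` field:
    let raw = r#"{"token": "abc", "edits": {"5502.01.250": "fix name"}}"#;
    let req: EditRequest = serde_json::from_str(raw).expect("valid payload");
    assert_eq!(req.edits.0["5502.01.250"], "fix name");
    println!("{req:?}");
}
```
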
5 changes: 5 additions & 0 deletions server/main-api/src/feedback/proposed_edits/tmp_repo.rs
@@ -4,11 +4,13 @@ use tracing::{debug, info};
use super::discription::Description;
use super::EditRequest;

#[derive(Debug)]
pub struct TempRepo {
dir: tempfile::TempDir,
branch_name: String,
}
impl TempRepo {
#[tracing::instrument]
pub async fn clone_and_checkout(
url: &'static str,
branch_name: &str,
@@ -48,6 +50,7 @@ impl TempRepo {
}
}

#[tracing::instrument]
pub fn apply_and_gen_description(&self, edits: &EditRequest) -> Description {
let mut description = Description::default();
description.add_context(&edits.additional_context);
@@ -60,6 +63,7 @@ impl TempRepo {
description
}

#[tracing::instrument]
pub async fn commit(&self, title: &str) -> Result<(), crate::BoxedError> {
let out = Command::new("git")
.current_dir(&self.dir)
Expand All @@ -82,6 +86,7 @@ impl TempRepo {
_ => Err(format!("git commit failed with output: {out:?}").into()),
}
}
#[tracing::instrument]
pub async fn push(&self) -> Result<(), crate::BoxedError> {
let out = Command::new("git")
.current_dir(&self.dir)
10 changes: 10 additions & 0 deletions server/main-api/src/feedback/tokens.rs
@@ -1,3 +1,5 @@
use std::fmt;

use actix_web::HttpResponse;
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
@@ -7,6 +9,13 @@ use tracing::error;
#[derive(Default)]
pub struct RecordedTokens(Mutex<Vec<TokenRecord>>);

impl fmt::Debug for RecordedTokens {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
//fields purposely omitted
f.debug_struct("RecordedTokens").finish()
}
}

pub struct TokenRecord {
kid: u64,
next_reset: i64,
@@ -43,6 +52,7 @@ impl Claims {
}

impl RecordedTokens {
#[tracing::instrument(skip(token))]
pub async fn validate(&self, token: &str) -> Option<HttpResponse> {
if !able_to_process_feedback() {
return Some(
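
Secrets are kept out of spans twice over in this file: `RecordedTokens` gets a field-less `Debug`, and `validate` never records the raw JWT thanks to `skip(token)`. When a redacted fingerprint is still useful for correlating log lines, `instrument` can record a custom field computed from a skipped argument; a sketch (the `token_prefix` helper is hypothetical, not part of this PR):

```rust
use tracing::instrument;

/// Hypothetical sanitizer: the first 8 bytes of the token are enough to
/// correlate log lines without disclosing the credential.
fn token_prefix(token: &str) -> &str {
    &token[..token.len().min(8)] // JWTs are ASCII, so byte slicing is safe
}

// `skip(token)` keeps the secret out of the span's fields;
// `fields(...)` records a safe fingerprint in its place.
#[instrument(skip(token), fields(token_prefix = token_prefix(token)))]
async fn validate(token: &str) -> bool {
    !token.is_empty()
}
```
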
60 changes: 60 additions & 0 deletions server/main-api/src/limited/hash_map.rs
@@ -0,0 +1,60 @@
use std::collections::HashMap;
use std::fmt;
use std::hash::Hash;

use serde::{Deserialize, Serialize};

use crate::limited::OrMore;

#[derive(Serialize, Deserialize, Clone, Default)]
pub struct LimitedHashMap<K: Eq + Hash, V>(pub HashMap<K, V>);

impl<K: Eq + Hash, V> From<HashMap<K, V>> for LimitedHashMap<K, V> {
fn from(value: HashMap<K, V>) -> Self {
LimitedHashMap(value)
}
}

const LIMIT: usize = 3;
impl<K: fmt::Debug + Eq + Hash + Clone + Ord, V: fmt::Debug + Clone> fmt::Debug
for LimitedHashMap<K, V>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut collection = self.0.clone().into_iter().collect::<Vec<(K, V)>>();
collection.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
if self.0.len() <= LIMIT {
f.debug_map().entries(collection).finish()
} else {
f.debug_map()
.entries(
collection
.into_iter()
.take(LIMIT)
.map(|(k, v)| (OrMore::Value(k), OrMore::Value(v)))
.chain([(OrMore::More, OrMore::More)]),
)
.finish()
}
}
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn test_limited_output() {
let w: LimitedHashMap<u32, u32> = LimitedHashMap(HashMap::new());
assert_eq!(format!("{w:?}"), "{}");
let w = LimitedHashMap(HashMap::from([(1, 1)]));
assert_eq!(format!("{w:?}"), "{1: 1}");
let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2)]));
assert_eq!(format!("{w:?}"), "{1: 1, 2: 2}");
let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3)]));
assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3}");
let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3), (4, 4)]));
assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3, ...: ...}");
let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)]));
assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3, ...: ...}");
}
}
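
The `Debug` impl above leans on `crate::limited::OrMore`, which this diff does not include. Judging by the expected test output (an unquoted `...: ...`), it is presumably a small enum whose `Debug` prints either the wrapped value or a literal `...`; a sketch of what that helper could look like:

```rust
use std::fmt;

/// Either a real value or a `...` marker for truncated Debug output.
/// (Assumed shape; the actual `crate::limited::OrMore` is not in this diff.)
enum OrMore<T> {
    Value(T),
    More,
}

impl<T: fmt::Debug> fmt::Debug for OrMore<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            OrMore::Value(v) => v.fmt(f),
            OrMore::More => write!(f, "..."), // unquoted, unlike "..." for &str
        }
    }
}
```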