Fix slow history loading caused by context switches

2025-01-04 19:13:32 -05:00
parent c69022448f
commit 4dd7cea97d
12 changed files with 278 additions and 122 deletions

View File

@@ -198,6 +198,8 @@ const show_data_at_time = computed(() => {
   }
 });
+const should_fade = ref(false);
 provide<GraphData>(GRAPH_DATA, {
   border_top: border_top,
   min_x: min_x,
@@ -216,6 +218,7 @@ provide<GraphData>(GRAPH_DATA, {
   legend_y_stride: legend_y_stride,
   legend_width: legend_width_output,
   cursor_time: show_data_at_time,
+  should_fade: (value) => (should_fade.value = value),
 });
 </script>
@@ -286,7 +289,9 @@ provide<GraphData>(GRAPH_DATA, {
       ></TimeText>
     </template>
   </g>
-  <slot></slot>
+  <g :class="`${should_fade ? 'fade' : ''}`">
+    <slot></slot>
+  </g>
   <g class="cursor_tick" v-if="mouse_t && cursor">
     <rect
       :x="x_map(mouse_t) - 100"

View File

@@ -35,6 +35,7 @@ const smoothing_distance_x = 5;
 const maximum_minimum_separation_live = 100; // ms
 const legend_line_length = 8;
 const legend_text_offset = 4;
+const marker_radius = 3;
 const text_offset = computed(() => 10);
 const min_sep = computed(() =>
@@ -152,8 +153,8 @@ watch([graph_data.min_x, graph_data.max_x], ([min_x, max_x]) => {
   if (min_x) {
     while (
-      memo.value.data.length > 1 &&
-      memo.value.data[0].x < toValue(min_x)
+      memo.value.data.length > 2 &&
+      memo.value.data[1].x < toValue(min_x)
     ) {
       memo.value.data.shift();
       memo_changed = true;
@@ -161,8 +162,8 @@ watch([graph_data.min_x, graph_data.max_x], ([min_x, max_x]) => {
   }
   if (max_x) {
     while (
-      memo.value.data.length > 1 &&
-      memo.value.data[memo.value.data.length - 1].x > toValue(max_x)
+      memo.value.data.length > 2 &&
+      memo.value.data[memo.value.data.length - 2].x > toValue(max_x)
     ) {
       memo.value.data.pop();
       memo_changed = true;
@@ -171,8 +172,8 @@ watch([graph_data.min_x, graph_data.max_x], ([min_x, max_x]) => {
   if (memo_changed) {
     let min_val = Infinity;
     let max_val = -Infinity;
-    for (const item of memo.value.data) {
-      const item_val = item.y;
+    for (let i = 1; i < memo.value.data.length; i++) {
+      const item_val = memo.value.data[i].y;
       min_val = Math.min(min_val, item_val);
       max_val = Math.max(max_val, item_val);
     }
@@ -318,50 +319,95 @@ function onCloseLegend() {
     }, 1);
   }
 }
+const legend_moused_over = ref(false);
+function onMouseEnter(event: MouseEvent) {
+  if (event.target == event.currentTarget) {
+    legend_moused_over.value = true;
+    graph_data.should_fade(true);
+  }
+}
+function onMouseExit(event: MouseEvent) {
+  if (event.target == event.currentTarget) {
+    legend_moused_over.value = false;
+    graph_data.should_fade(false);
+  }
+}
 </script>
 <template>
-  <g :class="`indexed-color color-${index}`">
+  <g
+    :class="`indexed-color color-${index} ${legend_moused_over ? 'no-fade' : ''}`"
+  >
+    <defs>
+      <marker
+        :id="`dot-${index}`"
+        :refX="marker_radius"
+        :refY="marker_radius"
+        markerUnits="strokeWidth"
+        :markerWidth="marker_radius * 2"
+        :markerHeight="marker_radius * 2"
+      >
+        <circle
+          :cx="marker_radius"
+          :cy="marker_radius"
+          :r="marker_radius"
+          :class="`indexed-color color-${index}`"
+        />
+      </marker>
+    </defs>
     <g clip-path="url(#content)">
       <polyline
+        class="fade_other_selected"
         fill="none"
         :transform="group_transform"
         :points="points"
       ></polyline>
-      <polyline fill="none" :points="current_data_point_line"> </polyline>
+      <polyline
+        class="fade_other_selected"
+        fill="none"
+        :marker-start="`url(#dot-${index})`"
+        :points="current_data_point_line"
+      >
+      </polyline>
     </g>
     <ValueLabel
       v-if="current_data_point"
+      class="fade_other_selected"
       :x="graph_data.x_map(toValue(graph_data.max_x)) + text_offset"
       :y="axis_data.y_map(current_data_point.y)"
       :value="current_data_point.y"
     >
     </ValueLabel>
     <template v-if="toValue(graph_data.legend_enabled)">
-      <rect
-        ref="legend-ref"
-        :class="`legend ${is_selected ? 'selected' : ''}`"
-        :x="legend_x - legend_text_offset"
-        :y="legend_y"
-        :width="toValue(graph_data.legend_width)"
-        :height="toValue(graph_data.legend_y_stride)"
-        @click="onOpenLegend"
-      >
-      </rect>
-      <polyline
-        class="legend"
-        fill="none"
-        :points="legend_line"
-        @click="onOpenLegend"
-      ></polyline>
-      <text
-        class="legend"
-        :x="legend_x + legend_line_length + legend_text_offset"
-        :y="legend_y + 1 + toValue(graph_data.legend_y_stride) / 2"
-        @click="onOpenLegend"
-      >
-        {{ legend_text }}
-      </text>
+      <g @mouseenter="onMouseEnter" @mouseleave="onMouseExit">
+        <rect
+          ref="legend-ref"
+          :class="`legend ${is_selected ? 'selected' : ''}`"
+          :x="legend_x - legend_text_offset"
+          :y="legend_y"
+          :width="toValue(graph_data.legend_width)"
+          :height="toValue(graph_data.legend_y_stride)"
+          @click="onOpenLegend"
+        >
+        </rect>
+        <polyline
+          class="legend"
+          fill="none"
+          :points="legend_line"
+          @click="onOpenLegend"
+        ></polyline>
+        <text
+          class="legend"
+          :x="legend_x + legend_line_length + legend_text_offset"
+          :y="legend_y + 1 + toValue(graph_data.legend_y_stride) / 2"
+          @click="onOpenLegend"
+        >
+          {{ legend_moused_over }} {{ legend_text }}
+        </text>
+      </g>
       <foreignObject height="0" width="0">
         <TooltipDialog
           :show="is_selected"
@@ -397,12 +443,28 @@ function onCloseLegend() {
   </g>
 </template>
+<style lang="scss">
+@use '@/assets/variables';
+.fade .fade_other_selected {
+  opacity: 25%;
+}
+.fade .no-fade .fade_other_selected {
+  opacity: 100%;
+}
+</style>
 <style scoped lang="scss">
 @use '@/assets/variables';
+.indexed-color {
+  stroke: var(--indexed-color);
+  fill: var(--indexed-color);
+}
 polyline {
   stroke-width: 1px;
-  stroke: var(--indexed-color);
 }
 text {
@@ -420,6 +482,10 @@ rect.legend {
   fill: transparent;
 }
+.legend {
+  pointer-events: all;
+}
 rect.legend.selected,
 rect.legend:hover,
 rect.legend:has(~ .legend:hover) {

View File

@@ -6,6 +6,7 @@ defineProps<{
   x: number;
   y: number;
   value: number;
+  class?: string;
 }>();
 const background_offset = computed(() => 5);
@@ -33,12 +34,13 @@ function update_value_text(text: string) {
 <template>
   <rect
+    :class="$props.class"
    :x="x - background_offset"
    :y="y - y_offset - background_offset"
    :width="label_width + background_offset * 2"
    :height="16 + background_offset * 2"
  ></rect>
-  <text ref="label-ref" :x="x" :y="y">
+  <text :class="$props.class" ref="label-ref" :x="x" :y="y">
     <NumericText
       :value="value"
       :max_width="6"

View File

@@ -24,4 +24,5 @@ export interface GraphData {
   legend_y_stride: MaybeRefOrGetter<number>;
   legend_width: MaybeRefOrGetter<number>;
   cursor_time: MaybeRefOrGetter<number>;
+  should_fade: (should_fade: boolean) => void;
 }

View File

@@ -5,6 +5,8 @@ use chrono::{DateTime, TimeDelta, Utc};
 use log::trace;
 use serde::Deserialize;
 use std::sync::Arc;
+use std::time::Duration;
+use tokio::time::timeout;
 #[get("/tlm/info/{name:[\\w\\d/_-]+}")]
 async fn get_tlm_definition(
@@ -16,7 +18,6 @@ async fn get_tlm_definition(
     let Some(data) = data.get_by_name(&string) else {
         return Err(HttpServerResultError::TlmNameNotFound { tlm: string });
     };
-
     Ok(web::Json(data.definition.clone()))
 }
@@ -29,7 +30,7 @@ struct HistoryQuery {
 #[get("/tlm/history/{uuid:[0-9a-f]+}")]
 async fn get_tlm_history(
-    data: web::Data<Arc<TelemetryManagementService>>,
+    data_arc: web::Data<Arc<TelemetryManagementService>>,
     uuid: web::Path<String>,
     info: web::Query<HistoryQuery>,
 ) -> Result<impl Responder, HttpServerResultError> {
@@ -54,14 +55,18 @@ async fn get_tlm_history(
     };
     let maximum_resolution = TimeDelta::milliseconds(info.resolution);
-    let history_service = data.history_service();
-    let data = data.pin();
+    let history_service = data_arc.history_service();
+    let data = data_arc.pin();
     match data.get_by_uuid(&uuid) {
         None => Err(HttpServerResultError::TlmUuidNotFound { uuid }),
-        Some(tlm) => Ok(web::Json(
-            tlm.get(from, to, maximum_resolution, &history_service)
-                .await,
-        )),
+        Some(tlm) => timeout(
+            Duration::from_secs(10),
+            tlm.get(from, to, maximum_resolution, &history_service),
+        )
+        .await
+        .map(|result| Ok(web::Json(result)))
+        .unwrap_or_else(|_| Err(HttpServerResultError::Timeout)),
     }
 }
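
The history lookup is now raced against a 10-second deadline. For reference, a minimal standalone sketch of the same tokio::time::timeout pattern; fetch_history and FetchError here are hypothetical stand-ins, not names from the codebase:

use std::time::Duration;
use tokio::time::timeout;

// Hypothetical stand-ins for the real handler pieces.
#[derive(Debug)]
enum FetchError {
    Timeout,
}

async fn fetch_history() -> Vec<f64> {
    vec![1.0, 2.0, 3.0]
}

async fn fetch_with_deadline() -> Result<Vec<f64>, FetchError> {
    // timeout() yields Err(Elapsed) if the inner future has not completed
    // within the duration; map that to the handler's timeout error.
    timeout(Duration::from_secs(10), fetch_history())
        .await
        .map_err(|_| FetchError::Timeout)
}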

View File

@@ -15,6 +15,8 @@ pub enum HttpServerResultError {
     TlmUuidNotFound { uuid: String },
     #[error("DateTime Parsing Error: {date_time}")]
     InvalidDateTime { date_time: String },
+    #[error("Timed out")]
+    Timeout,
 }
 impl ResponseError for HttpServerResultError {
@@ -23,6 +25,7 @@ impl ResponseError for HttpServerResultError {
             HttpServerResultError::TlmNameNotFound { .. } => StatusCode::NOT_FOUND,
             HttpServerResultError::TlmUuidNotFound { .. } => StatusCode::NOT_FOUND,
             HttpServerResultError::InvalidDateTime { .. } => StatusCode::BAD_REQUEST,
+            HttpServerResultError::Timeout { .. } => StatusCode::GATEWAY_TIMEOUT,
         }
     }
     fn error_response(&self) -> HttpResponse {

View File

@@ -6,7 +6,7 @@ use crate::http::api::setup_api;
 use crate::http::websocket::setup_websocket;
 use crate::telemetry::management_service::TelemetryManagementService;
 use actix_web::{web, App, HttpServer};
-use log::info;
+use log::{error, info};
 use std::sync::Arc;
 use tokio_util::sync::CancellationToken;
@@ -29,5 +29,7 @@ pub async fn setup(
         .run()
         .await?;
+    error!("http setup end");
     Ok(())
 }

View File

@@ -1,5 +1,6 @@
 mod grpc;
 mod http;
+mod serialization;
 mod telemetry;
 mod uuid;
@@ -11,6 +12,8 @@ use crate::telemetry::history::TelemetryHistoryService;
 use crate::telemetry::management_service::TelemetryManagementService;
 use log::error;
 use std::sync::Arc;
+use std::time::Duration;
+use tokio::time::sleep;
 use tokio_util::sync::CancellationToken;
 pub async fn setup() -> anyhow::Result<()> {
@@ -35,7 +38,14 @@ pub async fn setup() -> anyhow::Result<()> {
     grpc_server.await?; //grpc server is dropped
     drop(cancellation_token); // All cancellation tokens are now dropped
+    error!("after awaits");
     // Perform cleanup functions - at this point all servers have stopped and we can be sure that cleaning things up is safe
+    for _ in 0..15 {
+        if Arc::strong_count(&tlm) != 1 {
+            sleep(Duration::from_secs(1)).await;
+        }
+    }
     if let Some(tlm) = Arc::into_inner(tlm) {
         tlm.cleanup().await?;
     } else {
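
The shutdown path now gives other tasks up to 15 seconds to drop their Arc clones before attempting Arc::into_inner. A standalone sketch of that wait-then-cleanup pattern, with a hypothetical Resource type in place of the telemetry service:

use std::sync::Arc;
use std::time::Duration;
use tokio::time::sleep;

struct Resource;

impl Resource {
    async fn cleanup(self) {
        // Flush buffers, close files, etc.
    }
}

async fn shutdown(resource: Arc<Resource>) {
    // Bounded wait for the remaining Arc clones held by other tasks to drop.
    for _ in 0..15 {
        if Arc::strong_count(&resource) == 1 {
            break;
        }
        sleep(Duration::from_secs(1)).await;
    }
    // into_inner only succeeds once this is the last strong reference.
    if let Some(resource) = Arc::into_inner(resource) {
        resource.cleanup().await;
    }
}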

View File

@@ -0,0 +1,24 @@
+use std::fs::File;
+use std::io;
+
+pub trait FileWriteableType {
+    fn write_to_file(self, file: &mut File) -> io::Result<()>;
+}
+
+pub trait FileReadableType: Sized {
+    fn read_from_file(file: &mut File) -> io::Result<Self>;
+}
+
+pub trait FileExt {
+    fn write_data<T: FileWriteableType>(&mut self, data: T) -> io::Result<()>;
+    fn read_data<T: FileReadableType>(&mut self) -> io::Result<T>;
+}
+
+impl FileExt for File {
+    fn write_data<T: FileWriteableType>(&mut self, data: T) -> io::Result<()> {
+        data.write_to_file(self)
+    }
+    fn read_data<T: FileReadableType>(&mut self) -> io::Result<T> {
+        T::read_from_file(self)
+    }
+}

View File

@@ -0,0 +1,2 @@
+pub mod file_ext;
+pub mod primitives;

View File

@@ -0,0 +1,33 @@
+use crate::serialization::file_ext::{FileReadableType, FileWriteableType};
+use std::fs::File;
+use std::io::{Read, Write};
+
+macro_rules! primitive_write_read {
+    ( $primitive:ty, $length:expr ) => {
+        impl FileWriteableType for $primitive {
+            fn write_to_file(self, file: &mut File) -> std::io::Result<()> {
+                file.write_all(&self.to_be_bytes())
+            }
+        }
+
+        impl FileReadableType for $primitive {
+            fn read_from_file(file: &mut File) -> std::io::Result<Self> {
+                let mut buffer = [0u8; $length];
+                file.read_exact(&mut buffer)?;
+                Ok(Self::from_be_bytes(buffer))
+            }
+        }
+    };
+}
+
+primitive_write_read!(u64, 8);
+primitive_write_read!(u32, 4);
+primitive_write_read!(u16, 2);
+primitive_write_read!(u8, 1);
+primitive_write_read!(i64, 8);
+primitive_write_read!(i32, 4);
+primitive_write_read!(i16, 2);
+primitive_write_read!(i8, 1);
+primitive_write_read!(f64, 8);
+primitive_write_read!(f32, 4);
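
A possible round trip through the new traits, assuming the module path added above (crate::serialization::file_ext) and a throwaway file in the OS temp directory:

use crate::serialization::file_ext::FileExt;
use std::fs::File;
use std::io;

fn round_trip() -> io::Result<()> {
    let path = std::env::temp_dir().join("file_ext_round_trip.bin");

    // Write a few primitives; the macro impls serialize them big-endian.
    let mut file = File::create(&path)?;
    file.write_data::<u64>(42)?;
    file.write_data::<f32>(1.5)?;

    // Read them back in the same order and with the same types.
    let mut file = File::open(&path)?;
    assert_eq!(file.read_data::<u64>()?, 42);
    assert_eq!(file.read_data::<f32>()?, 1.5);
    Ok(())
}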

View File

@@ -1,20 +1,20 @@
 use crate::core::TelemetryDataType;
+use crate::serialization::file_ext::FileExt;
 use crate::telemetry::data::TelemetryData;
 use crate::telemetry::data_item::TelemetryDataItem;
 use crate::telemetry::data_value::TelemetryDataValue;
 use crate::telemetry::definition::TelemetryDefinition;
-use anyhow::{ensure, Context};
+use anyhow::{anyhow, ensure, Context};
 use chrono::{DateTime, DurationRound, SecondsFormat, TimeDelta, Utc};
 use log::{error, info};
 use std::cmp::min;
 use std::collections::VecDeque;
-use std::io::SeekFrom;
+use std::fs::File;
+use std::io::{Seek, SeekFrom, Write};
 use std::path::PathBuf;
 use std::sync::{Arc, RwLock};
 use std::{fs, path};
-use tokio::fs::File;
-use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
-use tokio::task::JoinHandle;
+use tokio::task::{spawn_blocking, JoinHandle};
 const FOLDER_DURATION: TimeDelta = TimeDelta::hours(1);
@@ -133,21 +133,18 @@ impl HistorySegmentRam {
     }
 }
-struct HistorySegmentDisk {
+struct HistorySegmentFile {
     start: DateTime<Utc>,
     end: DateTime<Utc>,
     length: u64,
     file: File,
 }
-impl HistorySegmentDisk {
+impl HistorySegmentFile {
     const TIMESTAMP_LENGTH: u64 = 8 + 4;
     const HEADER_LENGTH: u64 = Self::TIMESTAMP_LENGTH + Self::TIMESTAMP_LENGTH + 8;
-    async fn save_to_disk(
-        mut folder: PathBuf,
-        mut segment: HistorySegmentRam,
-    ) -> anyhow::Result<Self> {
+    fn save_to_disk(mut folder: PathBuf, mut segment: HistorySegmentRam) -> anyhow::Result<Self> {
         // Get the path for the specific timestamp we want to save to disk
         let folder_time = segment.start.duration_trunc(FOLDER_DURATION)?;
         folder.push(folder_time.to_rfc3339_opts(SecondsFormat::Secs, true));
@@ -161,7 +158,7 @@ impl HistorySegmentDisk {
             segment.start.to_rfc3339_opts(SecondsFormat::Secs, true)
         ));
-        let file = File::create(file).await?;
+        let file = File::create(file)?;
         let mut result = Self {
             start: segment.start,
@@ -176,14 +173,16 @@ impl HistorySegmentDisk {
         // Write the segment bounds
         result
             .file
-            .write_i64(utc_offset_start.num_seconds())
-            .await?;
+            .write_data::<i64>(utc_offset_start.num_seconds())?;
         result
             .file
-            .write_i32(utc_offset_start.subsec_nanos())
-            .await?;
-        result.file.write_i64(utc_offset_end.num_seconds()).await?;
-        result.file.write_i32(utc_offset_end.subsec_nanos()).await?;
+            .write_data::<i32>(utc_offset_start.subsec_nanos())?;
+        result
+            .file
+            .write_data::<i64>(utc_offset_end.num_seconds())?;
+        result
+            .file
+            .write_data::<i32>(utc_offset_end.subsec_nanos())?;
         let data = segment.data.get_mut().unwrap_or_else(|err| {
             error!(
@@ -199,28 +198,28 @@ impl HistorySegmentDisk {
         );
         result.length = data.timestamps.len() as u64;
-        result.file.write_u64(result.length).await?;
+        result.file.write_data::<u64>(result.length)?;
         // Write all the timestamps
         for timestamp in &data.timestamps {
             let utc_offset = *timestamp - DateTime::UNIX_EPOCH;
-            result.file.write_i64(utc_offset.num_seconds()).await?;
-            result.file.write_i32(utc_offset.subsec_nanos()).await?;
+            result.file.write_data::<i64>(utc_offset.num_seconds())?;
+            result.file.write_data::<i32>(utc_offset.subsec_nanos())?;
         }
         // Write all the values
         for value in &data.values {
             match value {
-                TelemetryDataValue::Float32(value) => result.file.write_f32(*value).await?,
-                TelemetryDataValue::Float64(value) => result.file.write_f64(*value).await?,
+                TelemetryDataValue::Float32(value) => result.file.write_data::<f32>(*value)?,
+                TelemetryDataValue::Float64(value) => result.file.write_data::<f64>(*value)?,
             }
         }
-        result.file.flush().await?;
+        result.file.flush()?;
         Ok(result)
     }
-    async fn load_to_ram(
+    fn load_to_ram(
         mut self,
         telemetry_data_type: TelemetryDataType,
     ) -> anyhow::Result<HistorySegmentRam> {
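
Read together with the constants above (TIMESTAMP_LENGTH = 12, HEADER_LENGTH = 32) and the write calls in save_to_disk, the segment file layout appears to be the following; this is an inference from the diff, not a documented format:

// Segment file layout (big-endian), as written by save_to_disk:
//   bytes  0..12   segment start: i64 seconds + i32 subsec nanos since UNIX_EPOCH
//   bytes 12..24   segment end:   i64 seconds + i32 subsec nanos since UNIX_EPOCH
//   bytes 24..32   length:        u64 sample count
//   then `length` timestamps, 12 bytes each (i64 seconds + i32 nanos)
//   then `length` values, 4 bytes (f32) or 8 bytes (f64) each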
@@ -229,14 +228,14 @@ impl HistorySegmentDisk {
             timestamps: Vec::with_capacity(self.length as usize),
         };
-        self.file.seek(SeekFrom::Start(Self::HEADER_LENGTH)).await?;
+        self.file.seek(SeekFrom::Start(Self::HEADER_LENGTH))?;
         for _ in 0..self.length {
-            segment_data.timestamps.push(self.read_date_time().await?);
+            segment_data.timestamps.push(self.read_date_time()?);
         }
         for _ in 0..self.length {
             segment_data
                 .values
-                .push(self.read_telemetry_item(telemetry_data_type).await?);
+                .push(self.read_telemetry_item(telemetry_data_type)?);
         }
         Ok(HistorySegmentRam {
@@ -246,7 +245,7 @@ impl HistorySegmentDisk {
         })
     }
-    async fn open(folder: PathBuf, start: DateTime<Utc>) -> anyhow::Result<Self> {
+    fn open(folder: PathBuf, start: DateTime<Utc>) -> anyhow::Result<Self> {
         // Get the path for the specific timestamp we want to save to disk
         let folder_time = start.duration_trunc(FOLDER_DURATION)?;
         let mut file = folder;
@@ -256,21 +255,21 @@ impl HistorySegmentDisk {
             start.to_rfc3339_opts(SecondsFormat::Secs, true)
         ));
-        let mut file = File::open(file).await?;
+        let mut file = File::open(file)?;
         // Write the segment bounds
-        let start_seconds = file.read_i64().await?;
-        let start_nanos = file.read_i32().await?;
-        let end_seconds = file.read_i64().await?;
-        let end_nanos = file.read_i32().await?;
+        let start_seconds = file.read_data::<i64>()?;
+        let start_nanos = file.read_data::<i32>()?;
+        let end_seconds = file.read_data::<i64>()?;
+        let end_nanos = file.read_data::<i32>()?;
         let start = TimeDelta::new(start_seconds, start_nanos as u32)
             .context("Failed to reconstruct start TimeDelta")?;
         let end = TimeDelta::new(end_seconds, end_nanos as u32)
             .context("Failed to reconstruct end TimeDelta")?;
-        let length = file.read_u64().await?;
-        Ok(HistorySegmentDisk {
+        let length = file.read_data::<u64>()?;
+        Ok(HistorySegmentFile {
             start: DateTime::UNIX_EPOCH + start,
             end: DateTime::UNIX_EPOCH + end,
             length,
@@ -278,7 +277,7 @@ impl HistorySegmentDisk {
         })
     }
-    async fn get(
+    fn get(
         &mut self,
         from: DateTime<Utc>,
         to: DateTime<Utc>,
@@ -290,10 +289,10 @@ impl HistorySegmentDisk {
         let mut next_from = from;
         if from < self.end && self.start < to {
-            let start = self.partition_point(from).await?;
+            let start = self.partition_point(from)?;
             if start < self.length {
                 for i in start..self.length {
-                    let t = self.get_date_time(i).await?;
+                    let t = self.get_date_time(i)?;
                     if t >= self.end {
                         break;
                     }
@@ -306,7 +305,7 @@ impl HistorySegmentDisk {
                         next_from,
                     );
                     result.push(TelemetryDataItem {
-                        value: self.get_telemetry_item(i, telemetry_data_type).await?,
+                        value: self.get_telemetry_item(i, telemetry_data_type)?,
                         timestamp: t.to_rfc3339_opts(SecondsFormat::Millis, true),
                     });
                 }
@@ -317,38 +316,36 @@ impl HistorySegmentDisk {
         Ok((next_from, result))
     }
-    async fn read_date_time(&mut self) -> anyhow::Result<DateTime<Utc>> {
-        let seconds = self.file.read_i64().await?;
-        let nanos = self.file.read_i32().await?;
+    fn read_date_time(&mut self) -> anyhow::Result<DateTime<Utc>> {
+        let seconds = self.file.read_data::<i64>()?;
+        let nanos = self.file.read_data::<i32>()?;
         let start =
             TimeDelta::new(seconds, nanos as u32).context("Failed to reconstruct TimeDelta")?;
         Ok(DateTime::UNIX_EPOCH + start)
     }
-    async fn get_date_time(&mut self, index: u64) -> anyhow::Result<DateTime<Utc>> {
-        self.file
-            .seek(SeekFrom::Start(
-                Self::HEADER_LENGTH + index * Self::TIMESTAMP_LENGTH,
-            ))
-            .await?;
-        self.read_date_time().await
+    fn get_date_time(&mut self, index: u64) -> anyhow::Result<DateTime<Utc>> {
+        self.file.seek(SeekFrom::Start(
+            Self::HEADER_LENGTH + index * Self::TIMESTAMP_LENGTH,
+        ))?;
+        self.read_date_time()
     }
-    async fn read_telemetry_item(
+    fn read_telemetry_item(
         &mut self,
         telemetry_data_type: TelemetryDataType,
     ) -> anyhow::Result<TelemetryDataValue> {
         match telemetry_data_type {
             TelemetryDataType::Float32 => {
-                Ok(TelemetryDataValue::Float32(self.file.read_f32().await?))
+                Ok(TelemetryDataValue::Float32(self.file.read_data::<f32>()?))
             }
             TelemetryDataType::Float64 => {
-                Ok(TelemetryDataValue::Float64(self.file.read_f64().await?))
+                Ok(TelemetryDataValue::Float64(self.file.read_data::<f64>()?))
             }
         }
     }
-    async fn get_telemetry_item(
+    fn get_telemetry_item(
         &mut self,
         index: u64,
         telemetry_data_type: TelemetryDataType,
@@ -357,15 +354,13 @@ impl HistorySegmentDisk {
             TelemetryDataType::Float32 => 4,
             TelemetryDataType::Float64 => 8,
         };
-        self.file
-            .seek(SeekFrom::Start(
-                Self::HEADER_LENGTH + self.length * Self::TIMESTAMP_LENGTH + index * item_length,
-            ))
-            .await?;
-        self.read_telemetry_item(telemetry_data_type).await
+        self.file.seek(SeekFrom::Start(
+            Self::HEADER_LENGTH + self.length * Self::TIMESTAMP_LENGTH + index * item_length,
+        ))?;
+        self.read_telemetry_item(telemetry_data_type)
     }
-    async fn partition_point(&mut self, date_time: DateTime<Utc>) -> anyhow::Result<u64> {
+    fn partition_point(&mut self, date_time: DateTime<Utc>) -> anyhow::Result<u64> {
         if self.length == 0 {
             return Ok(0);
         }
@@ -379,7 +374,7 @@ impl HistorySegmentDisk {
             let half = size / 2;
             let mid = left + half;
-            let is_less = self.get_date_time(mid).await? < date_time;
+            let is_less = self.get_date_time(mid)? < date_time;
             if is_less {
                 left = mid;
             }
@@ -387,7 +382,7 @@ impl HistorySegmentDisk {
         }
         Ok(left
-            + if self.get_date_time(left).await? < date_time {
+            + if self.get_date_time(left)? < date_time {
                 1
             } else {
                 0
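
The seek-based partition_point above computes the index of the first on-disk timestamp that is greater than or equal to the requested time, via a binary search over get_date_time. For comparison, the equivalent over an in-memory sorted slice (an illustrative sketch, not code from the repository):

use chrono::{DateTime, Utc};

/// Number of timestamps strictly earlier than `date_time`, i.e. the index
/// where reading should start. Assumes `timestamps` is sorted ascending.
fn partition_point_in_memory(timestamps: &[DateTime<Utc>], date_time: DateTime<Utc>) -> u64 {
    timestamps.partition_point(|t| *t < date_time) as u64
}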
@@ -423,8 +418,8 @@ impl TelemetryHistory {
     ) -> JoinHandle<()> {
         let mut path = service.data_root_folder.clone();
         path.push(&self.data.definition.uuid);
-        tokio::spawn(async move {
-            match HistorySegmentDisk::save_to_disk(path, history_segment_ram).await {
+        spawn_blocking(move || {
+            match HistorySegmentFile::save_to_disk(path, history_segment_ram) {
                 // Immediately drop the segment - now that we've saved it to disk we don't need to keep it in memory
                 Ok(segment) => drop(segment),
                 Err(err) => {
@@ -437,14 +432,14 @@ impl TelemetryHistory {
         })
     }
-    async fn get_disk_segment(
+    fn get_disk_segment(
         &self,
         service: &TelemetryHistoryService,
         start: DateTime<Utc>,
-    ) -> anyhow::Result<HistorySegmentDisk> {
+    ) -> JoinHandle<anyhow::Result<HistorySegmentFile>> {
         let mut path = service.data_root_folder.clone();
         path.push(&self.data.definition.uuid);
-        HistorySegmentDisk::open(path, start).await
+        spawn_blocking(move || HistorySegmentFile::open(path, start))
     }
     async fn create_ram_segment(
@@ -453,14 +448,20 @@ impl TelemetryHistory {
         service: &TelemetryHistoryService,
         telemetry_data_type: TelemetryDataType,
     ) -> HistorySegmentRam {
-        let ram = match self.get_disk_segment(service, start).await {
-            Ok(disk) => disk.load_to_ram(telemetry_data_type).await,
+        let ram = self
+            .get_disk_segment(service, start)
+            .await
+            .unwrap_or_else(|e| Err(anyhow!("Join Error {e}")))
+            .map(|disk| spawn_blocking(move || disk.load_to_ram(telemetry_data_type)));
+        let ram = match ram {
+            Ok(ram) => ram.await.unwrap_or_else(|e| Err(anyhow!("Join Error {e}"))),
             Err(e) => Err(e),
         };
         match ram {
             Ok(ram) => ram,
-            Err(_) => HistorySegmentRam::new(start, start + service.segment_width),
+            _ => HistorySegmentRam::new(start, start + service.segment_width),
         }
     }
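
This appears to be the core of the fix: blocking std::fs work is pushed onto tokio's blocking thread pool with spawn_blocking instead of being awaited piecemeal on the async runtime, and the JoinHandle's join error is folded back into the inner anyhow::Result. A minimal sketch of the pattern, with a hypothetical read_segment_blocking standing in for the real file work:

use anyhow::anyhow;
use std::path::PathBuf;
use tokio::task::spawn_blocking;

fn read_segment_blocking(path: PathBuf) -> anyhow::Result<Vec<u8>> {
    // Ordinary blocking file I/O; fine here because it runs on the blocking pool.
    Ok(std::fs::read(path)?)
}

async fn read_segment(path: PathBuf) -> anyhow::Result<Vec<u8>> {
    spawn_blocking(move || read_segment_blocking(path))
        .await
        // Flatten the JoinError into the same anyhow::Result the closure returns.
        .unwrap_or_else(|e| Err(anyhow!("Join Error {e}")))
}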
@@ -560,14 +561,11 @@ impl TelemetryHistory {
         let mut start = start;
         while start < end {
             // We're going to ignore errors with getting the disk segment
-            if let Ok(mut disk) = self
+            if let Ok(Ok(mut disk)) = self
                 .get_disk_segment(telemetry_history_service, start)
                 .await
             {
-                match disk
-                    .get(from, to, maximum_resolution, self.data.definition.data_type)
-                    .await
-                {
+                match disk.get(from, to, maximum_resolution, self.data.definition.data_type) {
                     Ok((new_from, new_data)) => {
                         from = new_from;
                         result.extend(new_data);
@@ -593,8 +591,13 @@ impl TelemetryHistory {
     pub async fn cleanup(&self, service: &TelemetryHistoryService) -> anyhow::Result<()> {
         let mut segments = self.segments.write().await;
-        for segment in segments.drain(..) {
-            self.cleanup_segment(service, segment).await?;
+        let segments = segments
+            .drain(..)
+            .map(|segment| self.cleanup_segment(service, segment))
+            .collect::<Vec<_>>();
+        for segment in segments {
+            segment.await?;
         }
         Ok(())