Skip to content

Commit

Permalink
Add tag/remark to ranked outlier search filters
Browse files Browse the repository at this point in the history
  • Loading branch information
kimhanbeom committed Nov 7, 2023
1 parent 8b313a0 commit 2ed2308
Show file tree
Hide file tree
Showing 3 changed files with 112 additions and 12 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ Versioning](https://semver.org/spec/v2.0.0.html).
### Changed

- Change the type of `id` in `ranked_outlier`/`saved_outlier` queries to `StringNumber`.
- Modified the ranked outliers GraphQL query to accept a `SearchFilter` that
  additionally supports `tag` and `remark` filtering.

## [0.14.5] - 2023-11-02

Expand Down
113 changes: 105 additions & 8 deletions src/graphql/outlier.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
use super::{always_true, model::ModelDigest, Role, RoleGuard, DEFAULT_CONNECTION_SIZE};
use super::{
always_true,
model::ModelDigest,
triage::response::{key, TriageResponse},
Role, RoleGuard, DEFAULT_CONNECTION_SIZE,
};
use crate::graphql::{earliest_key, latest_key};
use anyhow::anyhow;
use async_graphql::{
Expand Down Expand Up @@ -73,6 +78,8 @@ pub struct OutlierDistanceRange {
// GraphQL input object carrying the search criteria for ranked/saved outlier
// queries. All fields are optional; an absent field means "no constraint".
// NOTE(review): derive/attribute lines (e.g. `InputObject`) are above this
// hunk and not visible here — confirm against the full file.
pub struct SearchFilterInput {
    // Restrict results to outliers whose timestamp falls in this range.
    pub time: Option<OutlierTimeRange>,
    // Restrict results by outlier distance bounds.
    distance: Option<OutlierDistanceRange>,
    // Substring matched against event-tag names; outliers are kept only if
    // their triage response carries at least one matching tag id.
    tag: Option<String>,
    // Substring that must appear in the triage response's remarks text.
    remark: Option<String>,
}

#[Object]
Expand Down Expand Up @@ -632,9 +639,19 @@ async fn load_ranked_outliers_with_filter(

let store = crate::graphql::get_store(ctx).await?;
let map = store.outlier_map().into_prefix_map(&prefix);
let remarks_map = store.triage_response_map();
let tags_map = store.event_tag_set();

let (nodes, has_previous, has_next) =
load_nodes_with_search_filter(&map, &filter, after, before, first, last)?;
let (nodes, has_previous, has_next) = load_nodes_with_search_filter(
&map,
&remarks_map,
&tags_map,
&filter,
after,
before,
first,
last,
)?;

let mut connection = Connection::with_additional_fields(
has_previous,
Expand All @@ -650,9 +667,11 @@ async fn load_ranked_outliers_with_filter(
Ok(connection)
}

#[allow(clippy::type_complexity)] // since this is called within `load` only
#[allow(clippy::type_complexity, clippy::too_many_arguments)] // since this is called within `load` only
fn load_nodes_with_search_filter<'m, M, I>(
map: &'m M,
remarks_map: &review_database::IndexedMap<'_>,
tags_map: &review_database::IndexedSet<'_>,
filter: &Option<SearchFilterInput>,
after: Option<String>,
before: Option<String>,
Expand All @@ -673,9 +692,25 @@ where

let (nodes, has_more) = if let Some(after) = after {
let to = earliest_key(&after)?;
iter_through_search_filter_nodes(iter, &to, cmp::Ordering::is_ge, filter, last)
iter_through_search_filter_nodes(
iter,
remarks_map,
tags_map,
&to,
cmp::Ordering::is_ge,
filter,
last,
)
} else {
iter_through_search_filter_nodes(iter, &[], always_true, filter, last)
iter_through_search_filter_nodes(
iter,
remarks_map,
tags_map,
&[],
always_true,
filter,
last,
)
}?;
Ok((nodes, has_more, false))
} else {
Expand All @@ -689,16 +724,35 @@ where

let (nodes, has_more) = if let Some(before) = before {
let to = latest_key(&before)?;
iter_through_search_filter_nodes(iter, &to, cmp::Ordering::is_le, filter, first)
iter_through_search_filter_nodes(
iter,
remarks_map,
tags_map,
&to,
cmp::Ordering::is_le,
filter,
first,
)
} else {
iter_through_search_filter_nodes(iter, &[], always_true, filter, first)
iter_through_search_filter_nodes(
iter,
remarks_map,
tags_map,
&[],
always_true,
filter,
first,
)
}?;
Ok((nodes, false, has_more))
}
}

#[allow(clippy::too_many_lines)]
fn iter_through_search_filter_nodes<I>(
iter: I,
remarks_map: &review_database::IndexedMap<'_>,
tags_map: &review_database::IndexedSet<'_>,
to: &[u8],
cond: fn(cmp::Ordering) -> bool,
filter: &Option<SearchFilterInput>,
Expand All @@ -709,6 +763,29 @@ where
{
let mut nodes = Vec::new();
let mut exceeded = false;

let tag_id_list = if let Some(filter) = filter {
if let Some(tag) = &filter.tag {
let index = tags_map.index()?;
let tag_ids: Vec<u32> = index
.iter()
.filter(|(_, name)| {
let name = String::from_utf8_lossy(name).into_owned();
name.contains(tag)
})
.map(|(id, _)| id)
.collect();
if tag_ids.is_empty() {
return Ok((nodes, exceeded));
}
Some(tag_ids)
} else {
None
}
} else {
None
};

for (k, v) in iter {
if !(cond)(k.as_ref().cmp(to)) {
break;
Expand All @@ -720,6 +797,26 @@ where
};

if let Some(filter) = filter {
if filter.remark.is_some() || tag_id_list.is_some() {
let key = key(&node.source, Utc.timestamp_nanos(node.id.0));
if let Some(value) = remarks_map.get_by_key(&key)? {
let value: TriageResponse = bincode::DefaultOptions::new()
.deserialize(value.as_ref())
.map_err(|_| "invalid value in database")?;
if let Some(remark) = &filter.remark {
if !value.remarks.contains(remark) {
continue;
}
}
if let Some(tag_ids) = &tag_id_list {
if !tag_ids.iter().any(|tag| value.tag_ids.contains(tag)) {
continue;
}
}
} else {
continue;
}
}
if let Some(time) = &filter.time {
if let Some(start) = time.start {
if let Some(end) = time.end {
Expand Down
9 changes: 5 additions & 4 deletions src/graphql/triage/response.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,16 +11,17 @@ use review_database::{
};
use serde::{Deserialize, Serialize};

#[allow(clippy::module_name_repetitions)]
#[derive(Deserialize, Serialize, SimpleObject)]
#[graphql(complex)]
pub(super) struct TriageResponse {
pub struct TriageResponse {
#[graphql(skip)]
id: u32,
key: Vec<u8>,
source: String,
time: DateTime<Utc>,
tag_ids: Vec<u32>,
remarks: String,
pub tag_ids: Vec<u32>,
pub remarks: String,
creation_time: DateTime<Utc>,
last_modified_time: DateTime<Utc>,
}
Expand Down Expand Up @@ -134,7 +135,7 @@ async fn load(
>(&map, after, before, first, last, TriageResponseTotalCount)
}

fn key(source: &str, time: DateTime<Utc>) -> Vec<u8> {
pub fn key(source: &str, time: DateTime<Utc>) -> Vec<u8> {
let mut key = Vec::new();
key.extend_from_slice(source.as_bytes());
key.extend_from_slice(&time.timestamp_nanos_opt().unwrap_or_default().to_be_bytes());
Expand Down

0 comments on commit 2ed2308

Please sign in to comment.