Commit ad92bfd

Author: Devdutt Shenoi (committed)
Merge remote-tracking branch 'origin/main' into ingest
2 parents: 614af32 + 6346928

15 files changed, +174 -194 lines

.github/workflows/integration-test.yaml

Lines changed: 2 additions & 2 deletions
@@ -11,7 +11,7 @@ jobs:
 
   docker-compose-test:
     name: Quest Smoke and Load Tests for Standalone deployments
-    runs-on: ubuntu-latest
+    runs-on: self-hosted
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -23,7 +23,7 @@ jobs:
 
   docker-compose-distributed-test:
     name: Quest Smoke and Load Tests for Distributed deployments
-    runs-on: ubuntu-latest
+    runs-on: self-hosted
     steps:
       - name: Checkout
         uses: actions/checkout@v4

docker-compose-distributed-test-with-kafka.yaml

Lines changed: 1 addition & 0 deletions
@@ -189,6 +189,7 @@ services:
   quest:
     platform: linux/amd64
     image: ghcr.io/parseablehq/quest:main
+    pull_policy: always
     command:
       [
         "load",

docker-compose-distributed-test.yaml

Lines changed: 1 addition & 0 deletions
@@ -100,6 +100,7 @@ services:
   quest:
     platform: linux/amd64
     image: ghcr.io/parseablehq/quest:main
+    pull_policy: always
     command:
       [
         "load",

docker-compose-test-with-kafka.yaml

Lines changed: 2 additions & 1 deletion
@@ -3,7 +3,7 @@ networks:
 
 services:
   minio:
-    image: minio/minio:RELEASE.2023-02-10T18-48-39Z
+    image: minio/minio:RELEASE.2025-02-03T21-03-04Z
     entrypoint:
       - sh
       - -euc
@@ -67,6 +67,7 @@ services:
   quest:
     image: ghcr.io/parseablehq/quest:main
     platform: linux/amd64
+    pull_policy: always
     command: [
       "load",
       "http://parseable:8000",

docker-compose-test.yaml

Lines changed: 2 additions & 1 deletion
@@ -3,7 +3,7 @@ networks:
 
 services:
   minio:
-    image: minio/minio:RELEASE.2023-02-10T18-48-39Z
+    image: minio/minio:RELEASE.2025-02-03T21-03-04Z
     entrypoint:
       - sh
       - -euc
@@ -60,6 +60,7 @@ services:
 
   quest:
     image: ghcr.io/parseablehq/quest:main
+    pull_policy: always
     platform: linux/amd64
     command: [
       "load",

src/alerts/mod.rs

Lines changed: 12 additions & 5 deletions
@@ -27,7 +27,7 @@ use once_cell::sync::Lazy;
 use serde_json::Error as SerdeError;
 use std::collections::{HashMap, HashSet};
 use std::fmt::{self, Display};
-use tokio::sync::oneshot::{Receiver, Sender};
+use tokio::sync::oneshot::{self, Receiver, Sender};
 use tokio::sync::RwLock;
 use tokio::task::JoinHandle;
 use tracing::{trace, warn};
@@ -733,10 +733,17 @@ impl Alerts {
         let store = PARSEABLE.storage.get_object_store();
 
         for alert in store.get_alerts().await.unwrap_or_default() {
-            let (handle, rx, tx) =
-                schedule_alert_task(alert.get_eval_frequency(), alert.clone()).await?;
-
-            self.update_task(alert.id, handle, rx, tx).await;
+            let (outbox_tx, outbox_rx) = oneshot::channel::<()>();
+            let (inbox_tx, inbox_rx) = oneshot::channel::<()>();
+            let handle = schedule_alert_task(
+                alert.get_eval_frequency(),
+                alert.clone(),
+                inbox_rx,
+                outbox_tx,
+            )?;
+
+            self.update_task(alert.id, handle, outbox_rx, inbox_tx)
+                .await;
 
             map.insert(alert.id, alert);
         }
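
The change above inverts ownership of the control channels: instead of schedule_alert_task creating them internally and handing back (handle, rx, tx), the caller now builds an inbox pair (used to ask the task to stop) and an outbox pair (used by the task to report back), passes inbox_rx and outbox_tx into the scheduler, and keeps inbox_tx and outbox_rx for update_task alongside the JoinHandle. A minimal sketch of a task scheduled this way follows (this is not the actual schedule_alert_task from the crate; the exact signature, the minutes-based frequency, and the evaluation body are assumptions for illustration):

    use std::time::Duration;
    use tokio::sync::oneshot;
    use tokio::task::JoinHandle;

    // Hypothetical stand-in for schedule_alert_task: spawns a periodic
    // evaluation loop and hands its JoinHandle back to the caller.
    fn schedule_task(
        eval_frequency_mins: u64,
        mut inbox_rx: oneshot::Receiver<()>,
        outbox_tx: oneshot::Sender<()>,
    ) -> JoinHandle<()> {
        tokio::spawn(async move {
            let mut interval =
                tokio::time::interval(Duration::from_secs(eval_frequency_mins * 60));
            loop {
                tokio::select! {
                    _ = interval.tick() => {
                        // evaluate the alert here (omitted in this sketch)
                    }
                    // the owner signalled (or dropped) the inbox: stop evaluating
                    _ = &mut inbox_rx => break,
                }
            }
            // tell the owner the task has exited
            let _ = outbox_tx.send(());
        })
    }

Because the scheduler no longer awaits anything itself, it can be a plain function returning the JoinHandle, which is why the call sites in this commit change from .await? to a plain ?.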

src/handlers/http/alerts.rs

Lines changed: 23 additions & 4 deletions
@@ -25,6 +25,7 @@ use actix_web::{
     HttpRequest, Responder,
 };
 use bytes::Bytes;
+use tokio::sync::oneshot;
 use ulid::Ulid;
 
 use crate::alerts::{
@@ -55,7 +56,14 @@ pub async fn post(
     user_auth_for_query(&session_key, &alert.query).await?;
 
     // create scheduled tasks
-    let (handle, rx, tx) = schedule_alert_task(alert.get_eval_frequency(), alert.clone()).await?;
+    let (outbox_tx, outbox_rx) = oneshot::channel::<()>();
+    let (inbox_tx, inbox_rx) = oneshot::channel::<()>();
+    let handle = schedule_alert_task(
+        alert.get_eval_frequency(),
+        alert.clone(),
+        inbox_rx,
+        outbox_tx,
+    )?;
 
     // now that we've validated that the user can run this query
     // move on to saving the alert in ObjectStore
@@ -67,7 +75,9 @@ pub async fn post(
     let alert_bytes = serde_json::to_vec(&alert)?;
     store.put_object(&path, Bytes::from(alert_bytes)).await?;
 
-    ALERTS.update_task(alert.id, handle, rx, tx).await;
+    ALERTS
+        .update_task(alert.id, handle, outbox_rx, inbox_tx)
+        .await;
 
     Ok(web::Json(alert))
 }
@@ -136,7 +146,14 @@ pub async fn modify(
     alert.validate().await?;
 
     // modify task
-    let (handle, rx, tx) = schedule_alert_task(alert.get_eval_frequency(), alert.clone()).await?;
+    let (outbox_tx, outbox_rx) = oneshot::channel::<()>();
+    let (inbox_tx, inbox_rx) = oneshot::channel::<()>();
+    let handle = schedule_alert_task(
+        alert.get_eval_frequency(),
+        alert.clone(),
+        inbox_rx,
+        outbox_tx,
+    )?;
 
     // modify on disk
     PARSEABLE
@@ -148,7 +165,9 @@ pub async fn modify(
     // modify in memory
     ALERTS.update(&alert).await;
 
-    ALERTS.update_task(alert.id, handle, rx, tx).await;
+    ALERTS
+        .update_task(alert.id, handle, outbox_rx, inbox_tx)
+        .await;
 
     Ok(web::Json(alert))
 }

src/handlers/http/modal/ingest_server.rs

Lines changed: 9 additions & 52 deletions
@@ -16,6 +16,8 @@
  *
  */
 
+use std::thread;
+
 use actix_web::web;
 use actix_web::Scope;
 use actix_web_prometheus::PrometheusMetrics;
@@ -25,7 +27,6 @@ use bytes::Bytes;
 use relative_path::RelativePathBuf;
 use serde_json::Value;
 use tokio::sync::oneshot;
-use tracing::error;
 
 use crate::{
     analytics,
@@ -100,65 +101,21 @@ impl ParseableServer for IngestServer {
 
         migration::run_migration(&PARSEABLE).await?;
 
-        let (localsync_handler, mut localsync_outbox, localsync_inbox) =
-            sync::run_local_sync().await;
-        let (mut remote_sync_handler, mut remote_sync_outbox, mut remote_sync_inbox) =
-            sync::object_store_sync().await;
-        let (
-            mut remote_conversion_handler,
-            mut remote_conversion_outbox,
-            mut remote_conversion_inbox,
-        ) = sync::arrow_conversion().await;
+        // Run sync on a background thread
+        let (cancel_tx, cancel_rx) = oneshot::channel();
+        thread::spawn(|| sync::handler(cancel_rx));
 
         tokio::spawn(airplane::server());
 
         // write the ingestor metadata to storage
         PARSEABLE.store_ingestor_metadata().await?;
 
         // Ingestors shouldn't have to deal with OpenId auth flow
-        let app = self.start(shutdown_rx, prometheus.clone(), None);
-
-        tokio::pin!(app);
-        loop {
-            tokio::select! {
-                e = &mut app => {
-                    // actix server finished .. stop other threads and stop the server
-                    remote_sync_inbox.send(()).unwrap_or(());
-                    localsync_inbox.send(()).unwrap_or(());
-                    remote_conversion_inbox.send(()).unwrap_or(());
-                    if let Err(e) = localsync_handler.await {
-                        error!("Error joining remote_sync_handler: {:?}", e);
-                    }
-                    if let Err(e) = remote_sync_handler.await {
-                        error!("Error joining remote_sync_handler: {:?}", e);
-                    }
-                    if let Err(e) = remote_conversion_handler.await {
-                        error!("Error joining remote_conversion_handler: {:?}", e);
-                    }
-                    return e
-                },
-                _ = &mut localsync_outbox => {
-                    // crash the server if localsync fails for any reason
-                    // panic!("Local Sync thread died. Server will fail now!")
-                    return Err(anyhow::Error::msg("Failed to sync local data to drive. Please restart the Parseable server.\n\nJoin us on Parseable Slack if the issue persists after restart : https://launchpass.com/parseable"))
-                },
-                _ = &mut remote_sync_outbox => {
-                    // remote_sync failed, this is recoverable by just starting remote_sync thread again
-                    if let Err(e) = remote_sync_handler.await {
-                        error!("Error joining remote_sync_handler: {:?}", e);
-                    }
-                    (remote_sync_handler, remote_sync_outbox, remote_sync_inbox) = sync::object_store_sync().await;
-                },
-                _ = &mut remote_conversion_outbox => {
-                    // remote_conversion failed, this is recoverable by just starting remote_conversion thread again
-                    if let Err(e) = remote_conversion_handler.await {
-                        error!("Error joining remote_conversion_handler: {:?}", e);
-                    }
-                    (remote_conversion_handler, remote_conversion_outbox, remote_conversion_inbox) = sync::arrow_conversion().await;
-                }
+        let result = self.start(shutdown_rx, prometheus.clone(), None).await;
+        // Cancel sync jobs
+        cancel_tx.send(()).expect("Cancellation should not fail");
 
-            }
-        }
+        result
     }
 }

src/handlers/http/modal/query_server.rs

Lines changed: 11 additions & 36 deletions
@@ -16,6 +16,8 @@
  *
  */
 
+use std::thread;
+
 use crate::alerts::ALERTS;
 use crate::correlation::CORRELATIONS;
 use crate::handlers::airplane;
@@ -26,10 +28,9 @@ use crate::handlers::http::{logstream, MAX_EVENT_PAYLOAD_SIZE};
 use crate::handlers::http::{rbac, role};
 use crate::hottier::HotTierManager;
 use crate::rbac::role::Action;
-use crate::sync;
 use crate::users::dashboards::DASHBOARDS;
 use crate::users::filters::FILTERS;
-use crate::{analytics, migration, storage};
+use crate::{analytics, migration, storage, sync};
 use actix_web::web::{resource, ServiceConfig};
 use actix_web::{web, Scope};
 use actix_web_prometheus::PrometheusMetrics;
@@ -132,44 +133,18 @@ impl ParseableServer for QueryServer {
             hot_tier_manager.put_internal_stream_hot_tier().await?;
             hot_tier_manager.download_from_s3()?;
         };
-        let (localsync_handler, mut localsync_outbox, localsync_inbox) =
-            sync::run_local_sync().await;
-        let (mut remote_sync_handler, mut remote_sync_outbox, mut remote_sync_inbox) =
-            sync::object_store_sync().await;
+
+        // Run sync on a background thread
+        let (cancel_tx, cancel_rx) = oneshot::channel();
+        thread::spawn(|| sync::handler(cancel_rx));
 
         tokio::spawn(airplane::server());
-        let app = self.start(shutdown_rx, prometheus.clone(), PARSEABLE.options.openid());
 
-        tokio::pin!(app);
-        loop {
-            tokio::select! {
-                e = &mut app => {
-                    // actix server finished .. stop other threads and stop the server
-                    remote_sync_inbox.send(()).unwrap_or(());
-                    localsync_inbox.send(()).unwrap_or(());
-                    if let Err(e) = localsync_handler.await {
-                        error!("Error joining localsync_handler: {:?}", e);
-                    }
-                    if let Err(e) = remote_sync_handler.await {
-                        error!("Error joining remote_sync_handler: {:?}", e);
-                    }
-                    return e
-                },
-                _ = &mut localsync_outbox => {
-                    // crash the server if localsync fails for any reason
-                    // panic!("Local Sync thread died. Server will fail now!")
-                    return Err(anyhow::Error::msg("Failed to sync local data to drive. Please restart the Parseable server.\n\nJoin us on Parseable Slack if the issue persists after restart : https://launchpass.com/parseable"))
-                },
-                _ = &mut remote_sync_outbox => {
-                    // remote_sync failed, this is recoverable by just starting remote_sync thread again
-                    if let Err(e) = remote_sync_handler.await {
-                        error!("Error joining remote_sync_handler: {:?}", e);
-                    }
-                    (remote_sync_handler, remote_sync_outbox, remote_sync_inbox) = sync::object_store_sync().await;
-                }
+        let result = self.start(shutdown_rx, prometheus.clone(), None).await;
+        // Cancel sync jobs
+        cancel_tx.send(()).expect("Cancellation should not fail");
 
-            };
-        }
+        result
     }
 }
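In both ingest_server.rs and query_server.rs the hand-rolled tokio::select! supervision loop is removed: the server future is simply awaited, and the periodic sync work (local sync, object-store sync, arrow conversion) moves behind a single sync::handler running on its own OS thread, stopped through one oneshot cancellation channel. A rough sketch of that shape follows; the real sync::handler is not part of this diff, so its internals, the 60-second tick, and the runtime choice below are assumptions:

    use std::{thread, time::Duration};
    use tokio::sync::oneshot;

    // Hypothetical sync handler: owns its own single-threaded Tokio runtime
    // so periodic work stays off the HTTP server's runtime.
    fn handler(mut cancel_rx: oneshot::Receiver<()>) {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .expect("sync runtime");

        rt.block_on(async move {
            let mut interval = tokio::time::interval(Duration::from_secs(60));
            loop {
                tokio::select! {
                    _ = interval.tick() => {
                        // run local sync / object-store sync / arrow conversion here
                    }
                    // the server finished and sent (or dropped) the cancel signal
                    _ = &mut cancel_rx => break,
                }
            }
        });
    }

    fn main() {
        let (cancel_tx, cancel_rx) = oneshot::channel::<()>();
        let sync_thread = thread::spawn(|| handler(cancel_rx));

        // ... start and await the actix server here ...

        let _ = cancel_tx.send(());
        let _ = sync_thread.join();
    }

One behavioural consequence visible in the removed lines: the old select! arms restarted failed sync loops and could shut the server down if local sync died; under the new shape that supervision, if still needed, has to live inside the handler thread.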