Skip to content

Commit afca4ba

Browse files
author
Devdutt Shenoi
committed
Merge remote-tracking branch 'origin/main' into ingest
2 parents f697da8 + 5294779 commit afca4ba

File tree

9 files changed

+31
-31
lines changed

9 files changed

+31
-31
lines changed

.github/workflows/build.yaml

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -5,9 +5,6 @@ on:
55
- "helm/**"
66
- "assets/**"
77
- "**.md"
8-
push:
9-
branches:
10-
- main
118

129
name: Ensure parseable builds on all release targets
1310
jobs:

.github/workflows/coverage.yaml

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -5,9 +5,6 @@ on:
55
- "helm/**"
66
- "assets/**"
77
- "**.md"
8-
push:
9-
branches:
10-
- main
118

129
name: Lint, Test and Coverage Report
1310
jobs:

Dockerfile.debug

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ WORKDIR /parseable
2525

2626
# Cache dependencies
2727
COPY Cargo.toml Cargo.lock build.rs .git ./
28-
RUN mkdir src && echo "fn main() {}" > src/main.rs && cargo build --release && rm -rf src
28+
RUN mkdir src && echo "fn main() {}" > src/main.rs && cargo build && rm -rf src
2929

3030
# Build the actual binary
3131
COPY src ./src

src/handlers/http/logstream.rs

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -588,8 +588,8 @@ pub async fn get_stream_info(stream_name: Path<String>) -> Result<impl Responder
588588
.get(&stream_name)
589589
.ok_or(StreamError::StreamNotFound(stream_name.clone()))?;
590590

591-
let stream_info: StreamInfo = StreamInfo {
592-
stream_type: stream_meta.stream_type.clone(),
591+
let stream_info = StreamInfo {
592+
stream_type: stream_meta.stream_type,
593593
created_at: stream_meta.created_at.clone(),
594594
first_event_at: stream_first_event_at,
595595
time_partition: stream_meta.time_partition.clone(),
@@ -626,8 +626,10 @@ pub async fn put_stream_hot_tier(
626626
}
627627
}
628628

629-
if PARSEABLE.streams.stream_type(&stream_name).unwrap()
630-
== Some(StreamType::Internal.to_string())
629+
if PARSEABLE
630+
.streams
631+
.stream_type(&stream_name)
632+
.is_ok_and(|t| t == StreamType::Internal)
631633
{
632634
return Err(StreamError::Custom {
633635
msg: "Hot tier can not be updated for internal stream".to_string(),
@@ -717,8 +719,10 @@ pub async fn delete_stream_hot_tier(
717719
return Err(StreamError::HotTierNotEnabled(stream_name));
718720
};
719721

720-
if PARSEABLE.streams.stream_type(&stream_name).unwrap()
721-
== Some(StreamType::Internal.to_string())
722+
if PARSEABLE
723+
.streams
724+
.stream_type(&stream_name)
725+
.is_ok_and(|t| t == StreamType::Internal)
722726
{
723727
return Err(StreamError::Custom {
724728
msg: "Hot tier can not be deleted for internal stream".to_string(),

src/handlers/http/modal/query/querier_logstream.rs

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -166,8 +166,10 @@ pub async fn get_stats(
166166
let stats = stats::get_current_stats(&stream_name, "json")
167167
.ok_or(StreamError::StreamNotFound(stream_name.clone()))?;
168168

169-
let ingestor_stats = if PARSEABLE.streams.stream_type(&stream_name).unwrap()
170-
== Some(StreamType::UserDefined.to_string())
169+
let ingestor_stats = if PARSEABLE
170+
.streams
171+
.stream_type(&stream_name)
172+
.is_ok_and(|t| t == StreamType::UserDefined)
171173
{
172174
Some(fetch_stats_from_ingestors(&stream_name).await?)
173175
} else {

src/metadata.rs

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ pub struct LogStreamMetadata {
6969
pub custom_partition: Option<String>,
7070
pub static_schema_flag: bool,
7171
pub hot_tier_enabled: bool,
72-
pub stream_type: Option<String>,
72+
pub stream_type: StreamType,
7373
pub log_source: LogSource,
7474
}
7575

@@ -326,7 +326,7 @@ impl StreamInfo {
326326
} else {
327327
static_schema
328328
},
329-
stream_type: Some(stream_type.to_string()),
329+
stream_type,
330330
schema_version,
331331
log_source,
332332
..Default::default()
@@ -351,16 +351,17 @@ impl StreamInfo {
351351
self.read()
352352
.expect(LOCK_EXPECT)
353353
.iter()
354-
.filter(|(_, v)| v.stream_type.clone().unwrap() == StreamType::Internal.to_string())
354+
.filter(|(_, v)| v.stream_type == StreamType::Internal)
355355
.map(|(k, _)| k.clone())
356356
.collect()
357357
}
358358

359-
pub fn stream_type(&self, stream_name: &str) -> Result<Option<String>, MetadataError> {
360-
let map = self.read().expect(LOCK_EXPECT);
361-
map.get(stream_name)
359+
pub fn stream_type(&self, stream_name: &str) -> Result<StreamType, MetadataError> {
360+
self.read()
361+
.expect(LOCK_EXPECT)
362+
.get(stream_name)
362363
.ok_or(MetadataError::StreamMetaNotFound(stream_name.to_string()))
363-
.map(|metadata| metadata.stream_type.clone())
364+
.map(|metadata| metadata.stream_type)
364365
}
365366

366367
pub fn update_stats(

src/parseable.rs

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use crate::{
1717
option::Mode,
1818
storage::{
1919
object_storage::parseable_json_path, LogStream, ObjectStorageError, ObjectStorageProvider,
20-
ObjectStoreFormat, StreamType,
20+
ObjectStoreFormat,
2121
},
2222
};
2323

@@ -190,10 +190,7 @@ impl Parseable {
190190
.and_then(|limit| limit.parse().ok());
191191
let custom_partition = stream_metadata.custom_partition.as_deref().unwrap_or("");
192192
let static_schema_flag = stream_metadata.static_schema_flag;
193-
let stream_type = stream_metadata
194-
.stream_type
195-
.map(|s| StreamType::from(s.as_str()))
196-
.unwrap_or_default();
193+
let stream_type = stream_metadata.stream_type;
197194
let schema_version = stream_metadata.schema_version;
198195
let log_source = stream_metadata.log_source;
199196
self.streams.add_stream(

src/storage/mod.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,8 @@ pub struct ObjectStoreFormat {
115115
pub static_schema_flag: bool,
116116
#[serde(default)]
117117
pub hot_tier_enabled: bool,
118-
pub stream_type: Option<String>,
118+
#[serde(default)]
119+
pub stream_type: StreamType,
119120
#[serde(default)]
120121
pub log_source: LogSource,
121122
}
@@ -140,7 +141,8 @@ pub struct StreamInfo {
140141
skip_serializing_if = "std::ops::Not::not"
141142
)]
142143
pub static_schema_flag: bool,
143-
pub stream_type: Option<String>,
144+
#[serde(default)]
145+
pub stream_type: StreamType,
144146
pub log_source: LogSource,
145147
}
146148

@@ -205,7 +207,7 @@ impl Default for ObjectStoreFormat {
205207
version: CURRENT_SCHEMA_VERSION.to_string(),
206208
schema_version: SchemaVersion::V1, // Newly created streams should be v1
207209
objectstore_format: CURRENT_OBJECT_STORE_VERSION.to_string(),
208-
stream_type: Some(StreamType::UserDefined.to_string()),
210+
stream_type: StreamType::UserDefined,
209211
created_at: Local::now().to_rfc3339(),
210212
first_event_at: None,
211213
owner: Owner::new("".to_string(), "".to_string()),

src/storage/object_storage.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,7 +161,7 @@ pub trait ObjectStorage: Debug + Send + Sync + 'static {
161161
let format = ObjectStoreFormat {
162162
created_at: Local::now().to_rfc3339(),
163163
permissions: vec![Permisssion::new(PARSEABLE.options.username.clone())],
164-
stream_type: Some(stream_type.to_string()),
164+
stream_type,
165165
time_partition: (!time_partition.is_empty()).then(|| time_partition.to_string()),
166166
time_partition_limit: time_partition_limit.map(|limit| limit.to_string()),
167167
custom_partition: (!custom_partition.is_empty()).then(|| custom_partition.to_string()),

0 commit comments

Comments
 (0)