|
| 1 | +//! Demonstrates basic usage of the dataframe APIs. |
| 2 | +
|
| 3 | +use itertools::Itertools; |
| 4 | + |
| 5 | +use rerun::{ |
| 6 | + dataframe::{ |
| 7 | + concatenate_record_batches, EntityPathFilter, QueryCache, QueryEngine, QueryExpression, |
| 8 | + SparseFillStrategy, Timeline, |
| 9 | + }, |
| 10 | + ChunkStore, ChunkStoreConfig, StoreKind, VersionPolicy, |
| 11 | +}; |
| 12 | + |
| 13 | +fn main() -> Result<(), Box<dyn std::error::Error>> { |
| 14 | + let args = std::env::args().collect_vec(); |
| 15 | + |
| 16 | + let get_arg = |i| { |
| 17 | + let Some(value) = args.get(i) else { |
| 18 | + let bin_name = args.first().map_or("$BIN", |s| s.as_str()); |
| 19 | + eprintln!( |
| 20 | + "{}", |
| 21 | + unindent::unindent(&format!( |
| 22 | + "\ |
| 23 | + Usage: {bin_name} <path_to_rrd> [entity_path_filter] |
| 24 | +
|
| 25 | + This example will query for the first 10 rows of data in your recording of choice, |
| 26 | + and display the results as a table in your terminal. |
| 27 | +
|
| 28 | + You can use one of your recordings, or grab one from our hosted examples, e.g.: |
| 29 | + curl 'https://app.rerun.io/version/latest/examples/dna.rrd' -o - > /tmp/dna.rrd |
| 30 | +
|
| 31 | + The results can be filtered further by specifying an entity filter expression: |
| 32 | + {bin_name} my_recording.rrd /helix/structure/**\ |
| 33 | + ", |
| 34 | + )), |
| 35 | + ); |
| 36 | + std::process::exit(1); |
| 37 | + }; |
| 38 | + value |
| 39 | + }; |
| 40 | + |
| 41 | + let path_to_rrd = get_arg(1); |
| 42 | + let entity_path_filter = EntityPathFilter::try_from(args.get(2).map_or("/**", |s| s.as_str()))?; |
| 43 | + let timeline = Timeline::log_time(); |
| 44 | + |
| 45 | + let stores = ChunkStore::from_rrd_filepath( |
| 46 | + &ChunkStoreConfig::DEFAULT, |
| 47 | + path_to_rrd, |
| 48 | + VersionPolicy::Warn, |
| 49 | + )?; |
| 50 | + |
| 51 | + for (store_id, store) in &stores { |
| 52 | + if store_id.kind != StoreKind::Recording { |
| 53 | + continue; |
| 54 | + } |
| 55 | + |
| 56 | + let query_cache = QueryCache::new(store); |
| 57 | + let query_engine = QueryEngine { |
| 58 | + store, |
| 59 | + cache: &query_cache, |
| 60 | + }; |
| 61 | + |
| 62 | + let query = QueryExpression { |
| 63 | + filtered_index: Some(timeline), |
| 64 | + view_contents: Some( |
| 65 | + query_engine |
| 66 | + .iter_entity_paths(&entity_path_filter) |
| 67 | + .map(|entity_path| (entity_path, None)) |
| 68 | + .collect(), |
| 69 | + ), |
| 70 | + sparse_fill_strategy: SparseFillStrategy::LatestAtGlobal, |
| 71 | + ..Default::default() |
| 72 | + }; |
| 73 | + |
| 74 | + let query_handle = query_engine.query(query.clone()); |
| 75 | + let record_batches = query_handle.batch_iter().take(10).collect_vec(); |
| 76 | + |
| 77 | + let table = concatenate_record_batches(query_handle.schema().clone(), &record_batches)?; |
| 78 | + println!("{table}"); |
| 79 | + } |
| 80 | + |
| 81 | + Ok(()) |
| 82 | +} |