Merge "Project import generated by Copybara."
diff --git a/Android.bp b/Android.bp
index 4265016..ba9062f 100644
--- a/Android.bp
+++ b/Android.bp
@@ -9401,7 +9401,7 @@
     srcs: [
         "src/trace_processor/db/column.cc",
         "src/trace_processor/db/column_storage.cc",
-        "src/trace_processor/db/null_overlay.cc",
+        "src/trace_processor/db/query_executor.cc",
         "src/trace_processor/db/table.cc",
         "src/trace_processor/db/view.cc",
     ],
@@ -9449,6 +9449,7 @@
     srcs: [
         "src/trace_processor/db/column_storage_overlay_unittest.cc",
         "src/trace_processor/db/compare_unittest.cc",
+        "src/trace_processor/db/query_executor_unittest.cc",
         "src/trace_processor/db/view_unittest.cc",
     ],
 }
diff --git a/BUILD b/BUILD
index f217e0a..2a5369f 100644
--- a/BUILD
+++ b/BUILD
@@ -1310,8 +1310,8 @@
         "src/trace_processor/db/column_storage.h",
         "src/trace_processor/db/column_storage_overlay.h",
         "src/trace_processor/db/compare.h",
-        "src/trace_processor/db/null_overlay.cc",
-        "src/trace_processor/db/null_overlay.h",
+        "src/trace_processor/db/query_executor.cc",
+        "src/trace_processor/db/query_executor.h",
         "src/trace_processor/db/table.cc",
         "src/trace_processor/db/table.h",
         "src/trace_processor/db/typed_column.h",
diff --git a/include/perfetto/trace_processor/basic_types.h b/include/perfetto/trace_processor/basic_types.h
index ede17e1..9726bbb 100644
--- a/include/perfetto/trace_processor/basic_types.h
+++ b/include/perfetto/trace_processor/basic_types.h
@@ -23,6 +23,7 @@
 #include <stdint.h>
 #include <functional>
 #include <string>
+#include <unordered_map>
 #include <utility>
 #include <vector>
 
@@ -149,7 +150,14 @@
 
   // When set to true, trace processor will be augmented with a bunch of helpful
 // features for local development such as extra SQL functions.
+  //
+  // Note that the features behind this flag are subject to breakage without
+  // backward compatibility guarantees at any time.
   bool enable_dev_features = false;
+
+  // Sets developer-only flags to the provided values. Does not have any effect
+  // unless |enable_dev_features| = true.
+  std::unordered_map<std::string, std::string> dev_flags;
 };
 
 // Represents a dynamically typed value returned by SQL.
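
To illustrate how the new field is meant to be consumed, here is a minimal sketch, assuming the enclosing struct is trace_processor::Config and the usual TraceProcessor::CreateInstance entry point; the flag name is purely hypothetical:

#include <memory>

#include "perfetto/trace_processor/basic_types.h"
#include "perfetto/trace_processor/trace_processor.h"

std::unique_ptr<perfetto::trace_processor::TraceProcessor> MakeDevTp() {
  perfetto::trace_processor::Config config;
  // dev_flags is ignored unless enable_dev_features is also set.
  config.enable_dev_features = true;
  config.dev_flags["experimental_join_planner"] = "true";  // hypothetical flag
  return perfetto::trace_processor::TraceProcessor::CreateInstance(config);
}
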
diff --git a/include/perfetto/tracing/internal/data_source_type.h b/include/perfetto/tracing/internal/data_source_type.h
index 73c84be..f29798e 100644
--- a/include/perfetto/tracing/internal/data_source_type.h
+++ b/include/perfetto/tracing/internal/data_source_type.h
@@ -182,9 +182,10 @@
   // `TracePointTraits` and `trace_point_data` are customization points for
   // getting the active instances bitmap.
   template <typename TracePointTraits>
-  void NextIteration(InstancesIterator* iterator,
-                     DataSourceThreadLocalState* tls_state,
-                     typename TracePointTraits::TracePointData trace_point_data) {
+  void NextIteration(
+      InstancesIterator* iterator,
+      DataSourceThreadLocalState* tls_state,
+      typename TracePointTraits::TracePointData trace_point_data) {
     iterator->i++;
     FirstActiveInstance<TracePointTraits>(iterator, tls_state,
                                           trace_point_data);
diff --git a/python/generators/diff_tests/runner.py b/python/generators/diff_tests/runner.py
index 7cb316a..4aa0286 100644
--- a/python/generators/diff_tests/runner.py
+++ b/python/generators/diff_tests/runner.py
@@ -239,7 +239,7 @@
                       stderr.decode('utf8'), tp.returncode, perf_lines)
 
   # Run a query based Diff Test.
-  def __run_query_test(self, trace_path: str) -> TestResult:
+  def __run_query_test(self, trace_path: str, keep_query: bool) -> TestResult:
     # Fetch expected text.
     if self.test.expected_path:
       with open(self.test.expected_path, 'r') as expected_file:
@@ -274,7 +274,7 @@
         env=get_env(ROOT_DIR))
     (stdout, stderr) = tp.communicate()
 
-    if not self.test.blueprint.is_query_file():
+    if not self.test.blueprint.is_query_file() and not keep_query:
       tmp_query_file.close()
       os.remove(tmp_query_file.name)
     perf_lines = [line.decode('utf8') for line in tmp_perf_file.readlines()]
@@ -330,7 +330,7 @@
     str = f"{self.colors.yellow('[ RUN      ]')} {self.test.name}\n"
 
     if self.test.type == TestType.QUERY:
-      result = self.__run_query_test(trace_path)
+      result = self.__run_query_test(trace_path, keep_input)
     elif self.test.type == TestType.METRIC:
       result = self.__run_metrics_test(
           trace_path,
@@ -348,7 +348,8 @@
 
     def write_cmdlines():
       res = ""
-      if not gen_trace_file:
+      if self.test.trace_path and (self.test.trace_path.endswith('.textproto')
+                                   or self.test.trace_path.endswith('.py')):
         res += 'Command to generate trace:\n'
         res += 'tools/serialize_test_trace.py '
         res += '--descriptor {} {} > {}\n'.format(
diff --git a/python/tools/record_android_trace.py b/python/tools/record_android_trace.py
index 9256bd8..4d8ac2b 100644
--- a/python/tools/record_android_trace.py
+++ b/python/tools/record_android_trace.py
@@ -64,12 +64,17 @@
 class HttpHandler(http.server.SimpleHTTPRequestHandler):
 
   def end_headers(self):
-    self.send_header('Access-Control-Allow-Origin', '*')
-    return super().end_headers()
+    self.send_header('Access-Control-Allow-Origin', self.server.allow_origin)
+    self.send_header('Cache-Control', 'no-cache')
+    super().end_headers()
 
   def do_GET(self):
-    self.server.last_request = self.path
-    return super().do_GET()
+    if self.path != '/' + self.server.expected_fname:
+      self.send_error(404, "File not found")
+      return
+
+    self.server.fname_get_completed = True
+    super().do_GET()
 
   def do_POST(self):
     self.send_error(404, "File not found")
@@ -95,9 +100,15 @@
   help = 'Output file or directory (default: %s)' % default_out_dir_str
   parser.add_argument('-o', '--out', default=default_out_dir, help=help)
 
-  help = 'Don\'t open in the browser'
+  help = 'Don\'t open or serve the trace'
   parser.add_argument('-n', '--no-open', action='store_true', help=help)
 
+  help = 'Don\'t open in browser, but still serve trace (good for remote use)'
+  parser.add_argument('--no-open-browser', action='store_true', help=help)
+
+  help = 'The web address used to open trace files'
+  parser.add_argument('--origin', default='https://ui.perfetto.dev', help=help)
+
   help = 'Force the use of the sideloaded binaries rather than system daemons'
   parser.add_argument('--sideload', action='store_true', help=help)
 
@@ -148,6 +159,9 @@
   help = 'Can be generated with https://ui.perfetto.dev/#!/record'
   grp.add_argument('-c', '--config', default=None, help=help)
 
+  help = 'Parse input from --config as binary proto (default: parse as text)'
+  grp.add_argument('--bin', action='store_true', help=help)
+
   args = parser.parse_args()
   args.sideload = args.sideload or args.sideload_path is not None
 
@@ -220,7 +234,9 @@
   fname = '%s-%s.pftrace' % (tstamp, os.urandom(3).hex())
   device_file = device_dir + fname
 
-  cmd = [perfetto_cmd, '--background', '--txt', '-o', device_file]
+  cmd = [perfetto_cmd, '--background', '-o', device_file]
+  if not args.bin:
+    cmd.append('--txt')
   on_device_config = None
   on_host_config = None
   if args.config is not None:
@@ -340,7 +356,8 @@
   if not args.no_open:
     prt('\n')
     prt('Opening the trace (%s) in the browser' % host_file)
-    open_trace_in_browser(host_file)
+    open_browser = not args.no_open_browser
+    open_trace_in_browser(host_file, open_browser, args.origin)
 
 
 def prt(msg, colors=ANSI.END):
@@ -368,17 +385,24 @@
     sys.exit(1)
 
 
-def open_trace_in_browser(path):
+def open_trace_in_browser(path, open_browser, origin):
   # We reuse the HTTP+RPC port because it's the only one allowed by the CSP.
   PORT = 9001
+  path = os.path.abspath(path)
   os.chdir(os.path.dirname(path))
   fname = os.path.basename(path)
   socketserver.TCPServer.allow_reuse_address = True
   with socketserver.TCPServer(('127.0.0.1', PORT), HttpHandler) as httpd:
-    webbrowser.open_new_tab(
-        'https://ui.perfetto.dev/#!/?url=http://127.0.0.1:%d/%s' %
-        (PORT, fname))
-    while httpd.__dict__.get('last_request') != '/' + fname:
+    address = f'{origin}/#!/?url=http://127.0.0.1:{PORT}/{fname}'
+    if open_browser:
+      webbrowser.open_new_tab(address)
+    else:
+      print(f'Open URL in browser: {address}')
+
+    httpd.expected_fname = fname
+    httpd.fname_get_completed = None
+    httpd.allow_origin = origin
+    while httpd.fname_get_completed is None:
       httpd.handle_request()
 
 
diff --git a/src/profiling/symbolizer/breakpad_symbolizer.cc b/src/profiling/symbolizer/breakpad_symbolizer.cc
index 763d34f..df26bb8 100644
--- a/src/profiling/symbolizer/breakpad_symbolizer.cc
+++ b/src/profiling/symbolizer/breakpad_symbolizer.cc
@@ -64,11 +64,12 @@
   size_t num_symbolized_frames = 0;
   result.reserve(address.size());
   std::string file_path;
+  std::string raw_build_id = base::ToHex(build_id.c_str(), build_id.length());
 
   // Check to see if the |file_path_for_testing_| member is populated. If it is,
   // this file must be used.
   if (file_path_for_testing_.empty()) {
-    file_path = MakeFilePath(build_id, symbol_dir_path_).c_str();
+    file_path = MakeFilePath(raw_build_id, symbol_dir_path_).c_str();
   } else {
     file_path = file_path_for_testing_;
   }
diff --git a/src/profiling/symbolizer/breakpad_symbolizer.h b/src/profiling/symbolizer/breakpad_symbolizer.h
index 89c5ec5..394a66b 100644
--- a/src/profiling/symbolizer/breakpad_symbolizer.h
+++ b/src/profiling/symbolizer/breakpad_symbolizer.h
@@ -47,8 +47,6 @@
     file_path_for_testing_ = path;
   }
 
-  bool BuildIdNeedsHexConversion() override { return false; }
-
  private:
   std::string symbol_dir_path_;
   std::string file_path_for_testing_;
diff --git a/src/profiling/symbolizer/local_symbolizer.h b/src/profiling/symbolizer/local_symbolizer.h
index f92c553..8c0b769 100644
--- a/src/profiling/symbolizer/local_symbolizer.h
+++ b/src/profiling/symbolizer/local_symbolizer.h
@@ -108,8 +108,6 @@
       uint64_t load_bias,
       const std::vector<uint64_t>& address) override;
 
-  bool BuildIdNeedsHexConversion() override { return true; }
-
   ~LocalSymbolizer() override;
 
  private:
diff --git a/src/profiling/symbolizer/symbolize_database.cc b/src/profiling/symbolizer/symbolize_database.cc
index b7d7cfb..36c0e0d 100644
--- a/src/profiling/symbolizer/symbolize_database.cc
+++ b/src/profiling/symbolizer/symbolize_database.cc
@@ -83,8 +83,7 @@
 }
 
 std::map<UnsymbolizedMapping, std::vector<uint64_t>> GetUnsymbolizedFrames(
-    trace_processor::TraceProcessor* tp,
-    bool convert_build_id_to_bytes) {
+    trace_processor::TraceProcessor* tp) {
   std::map<UnsymbolizedMapping, std::vector<uint64_t>> res;
   Iterator it = tp->ExecuteQuery(kQueryUnsymbolized);
   while (it.Next()) {
@@ -94,8 +93,7 @@
     // TODO(b/148109467): Remove workaround once all active Chrome versions
     // write raw bytes instead of a string as build_id.
     std::string raw_build_id = it.Get(1).AsString();
-    if (convert_build_id_to_bytes &&
-        !trace_processor::util::IsHexModuleId(base::StringView(raw_build_id))) {
+    if (!trace_processor::util::IsHexModuleId(base::StringView(raw_build_id))) {
       build_id = FromHex(raw_build_id);
     } else {
       build_id = raw_build_id;
@@ -118,8 +116,7 @@
                        Symbolizer* symbolizer,
                        std::function<void(const std::string&)> callback) {
   PERFETTO_CHECK(symbolizer);
-  auto unsymbolized =
-      GetUnsymbolizedFrames(tp, symbolizer->BuildIdNeedsHexConversion());
+  auto unsymbolized = GetUnsymbolizedFrames(tp);
   for (auto it = unsymbolized.cbegin(); it != unsymbolized.cend(); ++it) {
     const auto& unsymbolized_mapping = it->first;
     const std::vector<uint64_t>& rel_pcs = it->second;
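
The net effect of removing BuildIdNeedsHexConversion() is that Symbolize() now always receives the build id as raw bytes, and BreakpadSymbolizer re-encodes it to hex itself (via base::ToHex above). A small sketch of that round trip, assuming base::ToHex lives in perfetto/ext/base/string_utils.h and emits lowercase hex:

#include <string>

#include "perfetto/ext/base/string_utils.h"

std::string BuildIdToBreakpadHex() {
  // Raw 4-byte build id, as GetUnsymbolizedFrames() now hands it out.
  std::string raw_build_id("\xde\xad\xbe\xef", 4);
  // BreakpadSymbolizer derives the hex form it needs for the symbol file path.
  return perfetto::base::ToHex(raw_build_id.c_str(), raw_build_id.size());
  // Returns "deadbeef" under the lowercase-hex assumption.
}
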
diff --git a/src/profiling/symbolizer/symbolizer.h b/src/profiling/symbolizer/symbolizer.h
index 85c3f91..8d50590 100644
--- a/src/profiling/symbolizer/symbolizer.h
+++ b/src/profiling/symbolizer/symbolizer.h
@@ -43,12 +43,6 @@
       uint64_t load_bias,
       const std::vector<uint64_t>& address) = 0;
   virtual ~Symbolizer();
-
-  // LocalSymbolizer uses a specific conversion of a symbol file's |build_id| to
-  // bytes, but BreakpadSymbolizer requires the |build_id| as given. Return true
-  // if the |build_id| passed to Symbolize() requires the conversion to bytes
-  // and false otherwise.
-  virtual bool BuildIdNeedsHexConversion() = 0;
 };
 
 }  // namespace profiling
diff --git a/src/trace_processor/containers/bit_vector.cc b/src/trace_processor/containers/bit_vector.cc
index 3e3adab..05be6bf 100644
--- a/src/trace_processor/containers/bit_vector.cc
+++ b/src/trace_processor/containers/bit_vector.cc
@@ -189,6 +189,30 @@
   return std::move(builder).Build();
 }
 
+void BitVector::Or(const BitVector& sec) {
+  PERFETTO_CHECK(size_ == sec.size());
+  for (uint32_t i = 0; i < words_.size(); ++i) {
+    BitWord(&words_[i]).Or(sec.words_[i]);
+  }
+
+  for (uint32_t i = 1; i < counts_.size(); ++i) {
+    counts_[i] = counts_[i - 1] +
+                 ConstBlock(&words_[Block::kWords * (i - 1)]).CountSetBits();
+  }
+}
+
+void BitVector::And(const BitVector& sec) {
+  Resize(std::min(size_, sec.size_));
+  for (uint32_t i = 0; i < words_.size(); ++i) {
+    BitWord(&words_[i]).And(sec.words_[i]);
+  }
+
+  for (uint32_t i = 1; i < counts_.size(); ++i) {
+    counts_[i] = counts_[i - 1] +
+                 ConstBlock(&words_[Block::kWords * (i - 1)]).CountSetBits();
+  }
+}
+
 void BitVector::UpdateSetBits(const BitVector& update) {
   if (update.CountSetBits() == 0 || CountSetBits() == 0) {
     *this = BitVector();
@@ -285,25 +309,20 @@
 
 BitVector BitVector::IntersectRange(uint32_t range_start,
                                     uint32_t range_end) const {
-  uint32_t total_set_bits = CountSetBits();
-  if (total_set_bits == 0 || range_start >= range_end)
-    return BitVector();
-
   // We should skip all bits until the index of first set bit bigger than
   // |range_start|.
-  uint32_t start_idx = std::max(range_start, IndexOfNthSet(0));
   uint32_t end_idx = std::min(range_end, size());
 
-  if (start_idx >= end_idx)
+  if (range_start >= end_idx)
     return BitVector();
 
   Builder builder(end_idx);
 
   // All bits before start should be empty.
-  builder.Skip(start_idx);
+  builder.Skip(range_start);
 
   uint32_t front_bits = builder.BitsUntilWordBoundaryOrFull();
-  uint32_t cur_index = start_idx;
+  uint32_t cur_index = range_start;
   for (uint32_t i = 0; i < front_bits; ++i, ++cur_index) {
     builder.Append(IsSet(cur_index));
   }
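
A small usage sketch of the new in-place operators (the values mirror the unit tests added later in this change):

#include "src/trace_processor/containers/bit_vector.h"

using perfetto::trace_processor::BitVector;

void BitwiseOps() {
  BitVector a{1, 1, 0, 0};
  BitVector b{1, 0, 1, 0};
  a.Or(b);   // a == {1, 1, 1, 0}; sizes must match (PERFETTO_CHECK).

  BitVector c{1, 0, 1, 1};
  a.And(c);  // a == {1, 0, 1, 0}; a is first resized to min(a.size(), c.size()).
}
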
diff --git a/src/trace_processor/containers/bit_vector.h b/src/trace_processor/containers/bit_vector.h
index a9a5fbd..cabb513 100644
--- a/src/trace_processor/containers/bit_vector.h
+++ b/src/trace_processor/containers/bit_vector.h
@@ -84,6 +84,9 @@
 
     // Creates a BitVector from this Builder.
     BitVector Build() && {
+      if (size_ == 0)
+        return BitVector();
+
       Address addr = IndexToAddress(size_ - 1);
       uint32_t no_blocks = addr.block_idx + 1;
       std::vector<uint32_t> counts(no_blocks);
@@ -148,6 +151,12 @@
   // Create a bitwise Not copy of the bitvector.
   BitVector Not() const;
 
+  // Bitwise Or of the bitvector.
+  void Or(const BitVector&);
+
+  // Bitwise And of the bitvector.
+  void And(const BitVector&);
+
   // Returns the size of the bitvector.
   uint32_t size() const { return static_cast<uint32_t>(size_); }
 
@@ -418,6 +427,9 @@
     // Bitwise ors the given |mask| to the current value.
     void Or(uint64_t mask) { *word_ |= mask; }
 
+    // Bitwise ands the given |mask| to the current value.
+    void And(uint64_t mask) { *word_ &= mask; }
+
     // Sets the bit at the given index to true.
     void Set(uint32_t idx) {
       PERFETTO_DCHECK(idx < kBits);
@@ -666,6 +678,12 @@
       BitWord(&start_word_[end.word_idx]).Set(0, end.bit_idx);
     }
 
+    void Or(Block& sec) {
+      for (uint32_t i = 0; i < kWords; ++i) {
+        BitWord(&start_word_[i]).Or(sec.start_word_[i]);
+      }
+    }
+
     template <typename Filler>
     void FromFiller(uint32_t offset, Filler f) {
       // We choose to iterate the bits as the outer loop as this allows us
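
A tiny sketch of the case the new guard in Builder::Build() covers, assuming a Builder may be constructed with size 0: an empty Builder now returns an empty BitVector instead of computing size_ - 1.

#include <utility>

#include "src/trace_processor/containers/bit_vector.h"

using perfetto::trace_processor::BitVector;

void EmptyBuild() {
  BitVector::Builder builder(0);              // nothing to append
  BitVector bv = std::move(builder).Build();  // returns an empty BitVector
  // bv.size() == 0
}
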
diff --git a/src/trace_processor/containers/bit_vector_unittest.cc b/src/trace_processor/containers/bit_vector_unittest.cc
index dd8c951..fd17424 100644
--- a/src/trace_processor/containers/bit_vector_unittest.cc
+++ b/src/trace_processor/containers/bit_vector_unittest.cc
@@ -645,6 +645,30 @@
   EXPECT_EQ(not_bv.CountSetBits(), 9u);
 }
 
+TEST(BitVectorUnittest, Or) {
+  BitVector bv{1, 1, 0, 0};
+  BitVector bv_second{1, 0, 1, 0};
+  bv.Or(bv_second);
+
+  ASSERT_EQ(bv.CountSetBits(), 3u);
+  ASSERT_TRUE(bv.IsSet(0));
+  ASSERT_TRUE(bv.IsSet(1));
+  ASSERT_TRUE(bv.IsSet(2));
+}
+
+TEST(BitVectorUnittest, OrBig) {
+  BitVector bv =
+      BitVector::Range(0, 1025, [](uint32_t i) { return i % 5 == 0; });
+  BitVector bv_sec =
+      BitVector::Range(0, 1025, [](uint32_t i) { return i % 3 == 0; });
+  bv.Or(bv_sec);
+
+  BitVector bv_or = BitVector::Range(
+      0, 1025, [](uint32_t i) { return i % 5 == 0 || i % 3 == 0; });
+
+  ASSERT_EQ(bv.CountSetBits(), bv_or.CountSetBits());
+}
+
 TEST(BitVectorUnittest, QueryStressTest) {
   BitVector bv;
   std::vector<bool> bool_vec;
diff --git a/src/trace_processor/containers/row_map.cc b/src/trace_processor/containers/row_map.cc
index d00d487..e903c8f 100644
--- a/src/trace_processor/containers/row_map.cc
+++ b/src/trace_processor/containers/row_map.cc
@@ -150,10 +150,7 @@
 }
 
 Variant IntersectInternal(BitVector& first, const BitVector& second) {
-  for (auto set_bit = first.IterateSetBits(); set_bit; set_bit.Next()) {
-    if (!second.IsSet(set_bit.index()))
-      set_bit.Clear();
-  }
+  first.And(second);
   return std::move(first);
 }
 
diff --git a/src/trace_processor/containers/row_map.h b/src/trace_processor/containers/row_map.h
index 283455c..45eedf5 100644
--- a/src/trace_processor/containers/row_map.h
+++ b/src/trace_processor/containers/row_map.h
@@ -20,6 +20,7 @@
 #include <stdint.h>
 
 #include <memory>
+#include <numeric>
 #include <optional>
 #include <variant>
 #include <vector>
@@ -429,6 +430,27 @@
     NoVariantMatched();
   }
 
+  // Converts this RowMap to an index vector in the most efficient way
+  // possible.
+  std::vector<uint32_t> TakeAsIndexVector() const&& {
+    if (auto* range = std::get_if<Range>(&data_)) {
+      std::vector<uint32_t> rm(range->size());
+      std::iota(rm.begin(), rm.end(), range->start);
+      return rm;
+    }
+    if (auto* bv = std::get_if<BitVector>(&data_)) {
+      std::vector<uint32_t> rm(bv->CountSetBits());
+      for (auto it = bv->IterateSetBits(); it; it.Next()) {
+        rm[it.ordinal()] = it.index();
+      }
+      return rm;
+    }
+    if (auto* vec = std::get_if<IndexVector>(&data_)) {
+      return std::move(*vec);
+    }
+    NoVariantMatched();
+  }
+
   // Returns the iterator over the rows in this RowMap.
   Iterator IterateRows() const { return Iterator(this); }
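
A sketch of how the new TakeAsIndexVector() might be consumed; note the rvalue qualifier, so the RowMap has to be moved from:

#include <cstdint>
#include <utility>
#include <vector>

#include "src/trace_processor/containers/row_map.h"

using perfetto::trace_processor::RowMap;

std::vector<uint32_t> RowsOf(RowMap rm) {
  // Range-backed, BitVector-backed and IndexVector-backed RowMaps all collapse
  // to a plain vector of row indices.
  return std::move(rm).TakeAsIndexVector();
}
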
 
diff --git a/src/trace_processor/db/BUILD.gn b/src/trace_processor/db/BUILD.gn
index 1ef081a..24d228f 100644
--- a/src/trace_processor/db/BUILD.gn
+++ b/src/trace_processor/db/BUILD.gn
@@ -24,8 +24,8 @@
     "column_storage.h",
     "column_storage_overlay.h",
     "compare.h",
-    "null_overlay.cc",
-    "null_overlay.h",
+    "query_executor.cc",
+    "query_executor.h",
     "table.cc",
     "table.h",
     "typed_column.h",
@@ -54,6 +54,7 @@
   sources = [
     "column_storage_overlay_unittest.cc",
     "compare_unittest.cc",
+    "query_executor_unittest.cc",
     "view_unittest.cc",
   ]
   deps = [
@@ -64,6 +65,8 @@
     "../../base",
     "../tables",
     "../views",
+    "overlays",
+    "storage",
   ]
 }
 
diff --git a/src/trace_processor/db/column.h b/src/trace_processor/db/column.h
index 66b273d..1f567de 100644
--- a/src/trace_processor/db/column.h
+++ b/src/trace_processor/db/column.h
@@ -414,6 +414,8 @@
     return *static_cast<ColumnStorage<stored_type<T>>*>(storage_);
   }
 
+  const ColumnStorageBase& storage_base() const { return *storage_; }
+
  protected:
   // Returns the backing sparse vector cast to contain data of type T.
   // Should only be called when |type_| == ToColumnType<T>().
diff --git a/src/trace_processor/db/column_storage.h b/src/trace_processor/db/column_storage.h
index 7e6a609..43ed387 100644
--- a/src/trace_processor/db/column_storage.h
+++ b/src/trace_processor/db/column_storage.h
@@ -17,6 +17,7 @@
 #ifndef SRC_TRACE_PROCESSOR_DB_COLUMN_STORAGE_H_
 #define SRC_TRACE_PROCESSOR_DB_COLUMN_STORAGE_H_
 
+#include "src/trace_processor/containers/bit_vector.h"
 #include "src/trace_processor/containers/nullable_vector.h"
 
 namespace perfetto {
@@ -34,11 +35,16 @@
 
   ColumnStorageBase(ColumnStorageBase&&) = default;
   ColumnStorageBase& operator=(ColumnStorageBase&&) noexcept = default;
+
+  virtual const void* data() const = 0;
+  virtual const BitVector* bv() const = 0;
+  virtual uint32_t size() const = 0;
+  virtual uint32_t non_null_size() const = 0;
 };
 
 // Class used for implementing storage for non-null columns.
 template <typename T>
-class ColumnStorage : public ColumnStorageBase {
+class ColumnStorage final : public ColumnStorageBase {
  public:
   ColumnStorage() = default;
 
@@ -51,10 +57,14 @@
   T Get(uint32_t idx) const { return vector_[idx]; }
   void Append(T val) { vector_.emplace_back(val); }
   void Set(uint32_t idx, T val) { vector_[idx] = val; }
-  uint32_t size() const { return static_cast<uint32_t>(vector_.size()); }
   void ShrinkToFit() { vector_.shrink_to_fit(); }
   const std::vector<T>& vector() const { return vector_; }
 
+  const void* data() const final { return vector_.data(); }
+  const BitVector* bv() const final { return nullptr; }
+  uint32_t size() const final { return static_cast<uint32_t>(vector_.size()); }
+  uint32_t non_null_size() const final { return size(); }
+
   template <bool IsDense>
   static ColumnStorage<T> Create() {
     static_assert(!IsDense, "Invalid for non-null storage to be dense.");
@@ -67,7 +77,7 @@
 
 // Class used for implementing storage for nullable columns.
 template <typename T>
-class ColumnStorage<std::optional<T>> : public ColumnStorageBase {
+class ColumnStorage<std::optional<T>> final : public ColumnStorageBase {
  public:
   ColumnStorage() = default;
 
@@ -81,7 +91,6 @@
   void Append(T val) { nv_.Append(val); }
   void Append(std::optional<T> val) { nv_.Append(std::move(val)); }
   void Set(uint32_t idx, T val) { nv_.Set(idx, val); }
-  uint32_t size() const { return nv_.size(); }
   bool IsDense() const { return nv_.IsDense(); }
   void ShrinkToFit() { nv_.ShrinkToFit(); }
   // For dense columns the size of the vector is equal to size of the bit
@@ -93,6 +102,13 @@
     return nv_.non_null_bit_vector();
   }
 
+  const void* data() const final { return nv_.non_null_vector().data(); }
+  const BitVector* bv() const final { return &nv_.non_null_bit_vector(); }
+  uint32_t size() const final { return nv_.size(); }
+  uint32_t non_null_size() const final {
+    return static_cast<uint32_t>(nv_.non_null_vector().size());
+  }
+
   template <bool IsDense>
   static ColumnStorage<std::optional<T>> Create() {
     return IsDense
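
A short sketch of what the new ColumnStorageBase accessors report for the two specializations (the sparse-storage numbers assume NullableVector stores only the non-null values, as described above):

#include <cstdint>
#include <optional>

#include "src/trace_processor/db/column_storage.h"

using perfetto::trace_processor::ColumnStorage;

void InspectStorage() {
  ColumnStorage<int64_t> plain;
  plain.Append(7);
  // Non-null storage: data() points at the vector, bv() is nullptr,
  // size() == non_null_size() == 1.

  auto sparse = ColumnStorage<std::optional<int64_t>>::Create<false>();
  sparse.Append(std::optional<int64_t>());    // null row
  sparse.Append(std::optional<int64_t>(42));  // stored row
  // Nullable storage: size() == 2 (all rows), non_null_size() == 1 (stored
  // values only), and bv() exposes the non-null BitVector.
}
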
diff --git a/src/trace_processor/db/column_storage_overlay.h b/src/trace_processor/db/column_storage_overlay.h
index 997d4be..6ff16ca 100644
--- a/src/trace_processor/db/column_storage_overlay.h
+++ b/src/trace_processor/db/column_storage_overlay.h
@@ -199,6 +199,8 @@
   // Returns the iterator over the rows in this ColumnStorageOverlay.
   Iterator IterateRows() const { return Iterator(row_map_.IterateRows()); }
 
+  const RowMap& row_map() const { return row_map_; }
+
  private:
   explicit ColumnStorageOverlay(RowMap rm) : row_map_(std::move(rm)) {}
 
diff --git a/src/trace_processor/db/null_overlay.cc b/src/trace_processor/db/null_overlay.cc
deleted file mode 100644
index 71b38f4..0000000
--- a/src/trace_processor/db/null_overlay.cc
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "src/trace_processor/db/null_overlay.h"
-
-namespace perfetto {
-namespace trace_processor {
-namespace overlays {
-
-void NullOverlay::Filter(FilterOp op, SqlValue, RowMap& rm) const {
-  if (op == FilterOp::kIsNull) {
-    rm.Intersect(RowMap(null_bv_->Not()));
-    return;
-  }
-  if (op == FilterOp::kIsNotNull) {
-    rm.Intersect(RowMap(null_bv_->Copy()));
-    return;
-  }
-
-  // Row map for filtered data, not the size of whole column.
-  RowMap filtered_data_rm(0, null_bv_->CountSetBits());
-  // inner_->Filter(op, sql_val, filtered_data_rm);
-
-  // Select only rows that were not filtered out from null BitVector and
-  // intersect it with RowMap&.
-  rm.Intersect(RowMap(null_bv_->Copy()).SelectRows(filtered_data_rm));
-}
-
-void NullOverlay::StableSort(uint32_t* rows, uint32_t rows_size) const {
-  uint32_t count_set_bits = null_bv_->CountSetBits();
-
-  std::vector<uint32_t> non_null_rows(count_set_bits);
-  std::vector<uint32_t> storage_to_rows(count_set_bits);
-
-  // Saving the map from `out` index to `storage` index gives us free `IsSet()`
-  // function, which would be very expensive otherwise.
-  for (auto it = null_bv_->IterateSetBits(); it; it.Next()) {
-    storage_to_rows[it.ordinal()] = it.index();
-  }
-
-  uint32_t cur_non_null_id = 0;
-  uint32_t cur_null_id = 0;
-
-  // Sort elements into null and non null.
-  for (uint32_t i = 0; i < rows_size; ++i) {
-    uint32_t row_idx = rows[i];
-    auto it = std::lower_bound(storage_to_rows.begin(), storage_to_rows.end(),
-                               row_idx);
-
-    // This condition holds if the row is null.
-    if (it == storage_to_rows.end() || *it != row_idx) {
-      // We can override the out because we already used this data.
-      rows[cur_null_id++] = row_idx;
-      continue;
-    }
-
-    uint32_t non_null_idx =
-        static_cast<uint32_t>(std::distance(storage_to_rows.begin(), it));
-    non_null_rows[cur_non_null_id++] = non_null_idx;
-  }
-
-  // Sort storage and translate them into `rows` indices.
-  // inner_->StableSort(non_null_rows.data(), count_set_bits);
-  uint32_t set_rows_offset = null_bv_->size() - count_set_bits;
-  for (uint32_t i = 0; i < count_set_bits; ++i) {
-    rows[set_rows_offset + i] = storage_to_rows[non_null_rows[i]];
-  }
-}
-
-}  // namespace overlays
-}  // namespace trace_processor
-}  // namespace perfetto
diff --git a/src/trace_processor/db/null_overlay.h b/src/trace_processor/db/null_overlay.h
deleted file mode 100644
index dc38f38..0000000
--- a/src/trace_processor/db/null_overlay.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SRC_TRACE_PROCESSOR_DB_NULL_OVERLAY_H_
-#define SRC_TRACE_PROCESSOR_DB_NULL_OVERLAY_H_
-
-#include "src/trace_processor/db/storage/storage.h"
-
-namespace perfetto {
-namespace trace_processor {
-namespace overlays {
-
-// Overlay responsible for operations related to column nullability.
-class NullOverlay {
- public:
-  explicit NullOverlay(const BitVector* null_bv) : null_bv_(null_bv) {}
-
-  void Filter(FilterOp, SqlValue, RowMap&) const;
-  void StableSort(uint32_t* rows, uint32_t rows_size) const;
-
- private:
-  // Vector of data nullability.
-  const BitVector* null_bv_;
-};
-
-}  // namespace overlays
-}  // namespace trace_processor
-}  // namespace perfetto
-
-#endif  // SRC_TRACE_PROCESSOR_DB_NULL_OVERLAY_H_
diff --git a/src/trace_processor/db/overlays/BUILD.gn b/src/trace_processor/db/overlays/BUILD.gn
index c35891f..21615b0 100644
--- a/src/trace_processor/db/overlays/BUILD.gn
+++ b/src/trace_processor/db/overlays/BUILD.gn
@@ -29,6 +29,7 @@
     "../../../../gn:default_deps",
     "../../../base",
     "../../containers",
+    "../storage",
   ]
 }
 
diff --git a/src/trace_processor/db/overlays/null_overlay.cc b/src/trace_processor/db/overlays/null_overlay.cc
index c5f5f66..3ecb317 100644
--- a/src/trace_processor/db/overlays/null_overlay.cc
+++ b/src/trace_processor/db/overlays/null_overlay.cc
@@ -16,6 +16,7 @@
 
 #include "src/trace_processor/db/overlays/null_overlay.h"
 #include "perfetto/ext/base/flat_hash_map.h"
+#include "src/trace_processor/db/overlays/types.h"
 
 namespace perfetto {
 namespace trace_processor {
@@ -30,9 +31,19 @@
   return StorageRange({Range(start, end)});
 }
 
-TableBitVector NullOverlay::MapToTableBitVector(StorageBitVector s_bv) const {
+TableBitVector NullOverlay::MapToTableBitVector(StorageBitVector s_bv,
+                                                OverlayOp op) const {
   BitVector res = non_null_->Copy();
   res.UpdateSetBits(s_bv.bv);
+
+  if (op != OverlayOp::kIsNull)
+    return {std::move(res)};
+
+  if (res.CountSetBits() == 0)
+    return {non_null_->Not()};
+
+  BitVector not_non_null = non_null_->Not();
+  res.Or(not_non_null);
   return {std::move(res)};
 }
 
@@ -42,7 +53,7 @@
   PERFETTO_DCHECK(t_iv.indices.size() <= non_null_->size());
 
   if (op != OverlayOp::kOther)
-    return BitVector();
+    return BitVector(t_iv.size(), false);
 
   BitVector in_storage(static_cast<uint32_t>(t_iv.indices.size()), false);
 
@@ -73,7 +84,7 @@
     OverlayOp op,
     const TableIndexVector& t_iv_overlay_idx) const {
   if (op == OverlayOp::kOther)
-    return BitVector();
+    return BitVector(t_iv_overlay_idx.size(), false);
 
   BitVector res(static_cast<uint32_t>(t_iv_overlay_idx.indices.size()), false);
   if (op == OverlayOp::kIsNull) {
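
A worked sketch of the kIsNull path added to MapToTableBitVector above (same values as the MapToTableBitVectorIsNull unit test later in this change):

#include <utility>

#include "src/trace_processor/containers/bit_vector.h"
#include "src/trace_processor/db/overlays/null_overlay.h"
#include "src/trace_processor/db/overlays/types.h"

namespace overlays = perfetto::trace_processor::overlays;
using perfetto::trace_processor::BitVector;

void IsNullMapping() {
  // Table rows 1, 2, 5 and 6 hold storage-backed (non-null) values.
  BitVector non_null{0, 1, 1, 0, 0, 1, 1, 0};
  overlays::NullOverlay overlay(&non_null);

  // Storage reported that the 2nd and 4th stored values matched.
  BitVector storage_bv{0, 1, 0, 1};
  overlays::TableBitVector out = overlay.MapToTableBitVector(
      {std::move(storage_bv)}, overlays::OverlayOp::kIsNull);
  // For kIsNull the null rows are OR-ed into the result:
  // out.bv == {1, 0, 1, 1, 1, 0, 1, 1}.
}
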
diff --git a/src/trace_processor/db/overlays/null_overlay.h b/src/trace_processor/db/overlays/null_overlay.h
index 93da2c0..328dc9b 100644
--- a/src/trace_processor/db/overlays/null_overlay.h
+++ b/src/trace_processor/db/overlays/null_overlay.h
@@ -27,11 +27,12 @@
 // using BitVector.
 class NullOverlay : public StorageOverlay {
  public:
-  explicit NullOverlay(BitVector* null) : non_null_(std::move(null)) {}
+  explicit NullOverlay(const BitVector* null) : non_null_(std::move(null)) {}
 
   StorageRange MapToStorageRange(TableRange) const override;
 
-  TableBitVector MapToTableBitVector(StorageBitVector) const override;
+  TableBitVector MapToTableBitVector(StorageBitVector,
+                                     OverlayOp) const override;
 
   BitVector IsStorageLookupRequired(OverlayOp,
                                     const TableIndexVector&) const override;
@@ -44,7 +45,7 @@
 
  private:
   // Non null data in the overlay.
-  BitVector* non_null_;
+  const BitVector* non_null_;
 };
 
 }  // namespace overlays
diff --git a/src/trace_processor/db/overlays/null_overlay_unittest.cc b/src/trace_processor/db/overlays/null_overlay_unittest.cc
index 6f80eba..0c02ad7 100644
--- a/src/trace_processor/db/overlays/null_overlay_unittest.cc
+++ b/src/trace_processor/db/overlays/null_overlay_unittest.cc
@@ -46,13 +46,29 @@
 
   BitVector storage_bv{0, 1, 0, 1};
   TableBitVector table_bv =
-      overlay.MapToTableBitVector({std::move(storage_bv)});
+      overlay.MapToTableBitVector({std::move(storage_bv)}, OverlayOp::kOther);
 
   ASSERT_EQ(table_bv.bv.CountSetBits(), 2u);
   ASSERT_TRUE(table_bv.bv.IsSet(2));
   ASSERT_TRUE(table_bv.bv.IsSet(6));
 }
 
+TEST(NullOverlay, MapToTableBitVectorIsNull) {
+  BitVector bv{0, 1, 1, 0, 0, 1, 1, 0};
+  NullOverlay overlay(&bv);
+
+  BitVector storage_bv{0, 1, 0, 1};
+  TableBitVector table_bv =
+      overlay.MapToTableBitVector({std::move(storage_bv)}, OverlayOp::kIsNull);
+
+  // Result is all of the zeroes from |bv| and set bits from |storage_bv|
+  // 1, 0, 1, 1, 1, 0, 1, 1
+
+  ASSERT_EQ(table_bv.bv.CountSetBits(), 6u);
+  ASSERT_FALSE(table_bv.bv.IsSet(1));
+  ASSERT_FALSE(table_bv.bv.IsSet(5));
+}
+
 TEST(NullOverlay, IsStorageLookupRequiredNullOp) {
   BitVector bv{0, 1, 1, 0, 0, 1, 1, 0};
   NullOverlay overlay(&bv);
@@ -61,7 +77,7 @@
   BitVector lookup_bv =
       overlay.IsStorageLookupRequired(OverlayOp::kIsNull, {table_idx});
 
-  ASSERT_EQ(lookup_bv.size(), 0u);
+  ASSERT_EQ(lookup_bv.CountSetBits(), 0u);
 }
 
 TEST(NullOverlay, IsStorageLookupRequiredOtherOp) {
@@ -96,7 +112,7 @@
   std::vector<uint32_t> table_idx{0, 3, 4};
   BitVector idx_search_bv = overlay.IndexSearch(OverlayOp::kOther, {table_idx});
 
-  ASSERT_EQ(idx_search_bv.size(), 0u);
+  ASSERT_EQ(idx_search_bv.CountSetBits(), 0u);
 }
 
 TEST(NullOverlay, IndexSearchIsNullOp) {
diff --git a/src/trace_processor/db/overlays/selector_overlay.cc b/src/trace_processor/db/overlays/selector_overlay.cc
index e501ddf..bc6fcfb 100644
--- a/src/trace_processor/db/overlays/selector_overlay.cc
+++ b/src/trace_processor/db/overlays/selector_overlay.cc
@@ -15,6 +15,7 @@
  */
 
 #include "src/trace_processor/db/overlays/selector_overlay.h"
+#include "src/trace_processor/containers/bit_vector.h"
 
 namespace perfetto {
 namespace trace_processor {
@@ -29,9 +30,9 @@
             selected_->IndexOfNthSet(t_range.range.end - 1) + 1)};
 }
 
-TableBitVector SelectorOverlay::MapToTableBitVector(
-    StorageBitVector s_bv) const {
-  PERFETTO_DCHECK(s_bv.bv.size() == selected_->size());
+TableBitVector SelectorOverlay::MapToTableBitVector(StorageBitVector s_bv,
+                                                    OverlayOp) const {
+  PERFETTO_DCHECK(selected_->size() >= s_bv.bv.size());
   BitVector res(selected_->CountSetBits());
   // TODO(b/283763282): Implement this variation of |UpdateSetBits| in
   // BitVector.
diff --git a/src/trace_processor/db/overlays/selector_overlay.h b/src/trace_processor/db/overlays/selector_overlay.h
index 70be983..1cad92c 100644
--- a/src/trace_processor/db/overlays/selector_overlay.h
+++ b/src/trace_processor/db/overlays/selector_overlay.h
@@ -18,6 +18,7 @@
 #define SRC_TRACE_PROCESSOR_DB_OVERLAYS_SELECTOR_OVERLAY_H_
 
 #include "src/trace_processor/db/overlays/storage_overlay.h"
+#include "src/trace_processor/db/overlays/types.h"
 
 namespace perfetto {
 namespace trace_processor {
@@ -30,7 +31,8 @@
 
   StorageRange MapToStorageRange(TableRange) const override;
 
-  TableBitVector MapToTableBitVector(StorageBitVector) const override;
+  TableBitVector MapToTableBitVector(StorageBitVector,
+                                     OverlayOp) const override;
 
   BitVector IsStorageLookupRequired(OverlayOp,
                                     const TableIndexVector&) const override;
diff --git a/src/trace_processor/db/overlays/selector_overlay_unittest.cc b/src/trace_processor/db/overlays/selector_overlay_unittest.cc
index 6e14597..8c743e8 100644
--- a/src/trace_processor/db/overlays/selector_overlay_unittest.cc
+++ b/src/trace_processor/db/overlays/selector_overlay_unittest.cc
@@ -46,7 +46,7 @@
 
   BitVector storage_bv{1, 0, 1, 0, 1, 0, 1, 0};
   TableBitVector table_bv =
-      overlay.MapToTableBitVector({std::move(storage_bv)});
+      overlay.MapToTableBitVector({std::move(storage_bv)}, OverlayOp::kOther);
 
   ASSERT_EQ(table_bv.bv.size(), 4u);
   ASSERT_EQ(table_bv.bv.CountSetBits(), 2u);
diff --git a/src/trace_processor/db/overlays/storage_overlay.h b/src/trace_processor/db/overlays/storage_overlay.h
index 58dbf50..c31610b 100644
--- a/src/trace_processor/db/overlays/storage_overlay.h
+++ b/src/trace_processor/db/overlays/storage_overlay.h
@@ -34,16 +34,16 @@
 // indices and storage indices. i.e. even if "table indices" we are working with
 // come from another overlay, we still consider them as having come from the
 // table and vice versa for "storage indices".
+//
+// The core functions in this class work with input and output arguments which
+// use the same data structure but have different semantics (i.e. input might
+// be in terms of storage indices and output might be in terms of table
+// indices).
+//
+// For this reason, we use the defined wrapper structs which "tag" the data
+// structure with the semantics.
 class StorageOverlay {
  public:
-  // The core functions in this class work with input and output arguments which
-  // use the same data structure but have different semantics (i.e. input might
-  // be in terms of storage indices and output might be in terms of table
-  // indices).
-  //
-  // For this reason, we use the defined wrapper structs which "tag" the data
-  // structure with the semantics.
-
   virtual ~StorageOverlay();
 
   // Maps a range of indices in table space to an equivalent range of
@@ -52,7 +52,8 @@
 
   // Maps a BitVector of indices in storage space to an equivalent range of
   // indices in the table space.
-  virtual TableBitVector MapToTableBitVector(StorageBitVector) const = 0;
+  virtual TableBitVector MapToTableBitVector(StorageBitVector,
+                                             OverlayOp) const = 0;
 
   // Returns a BitVector where each boolean indicates if the corresponding index
   // in |indices| needs to be mapped and searched in the storage or if the
diff --git a/src/trace_processor/db/overlays/types.h b/src/trace_processor/db/overlays/types.h
index 4e72a41..7978ada 100644
--- a/src/trace_processor/db/overlays/types.h
+++ b/src/trace_processor/db/overlays/types.h
@@ -18,6 +18,7 @@
 
 #include "src/trace_processor/containers/bit_vector.h"
 #include "src/trace_processor/containers/row_map.h"
+#include "src/trace_processor/db/storage/types.h"
 
 namespace perfetto {
 namespace trace_processor {
@@ -46,11 +47,15 @@
 // Represents a vector of indices in the table space.
 struct TableIndexVector {
   std::vector<uint32_t> indices;
+
+  uint32_t size() const { return static_cast<uint32_t>(indices.size()); }
 };
 
 // Represents a vector of indices in the storage space.
 struct StorageIndexVector {
   std::vector<uint32_t> indices;
+
+  uint32_t size() const { return static_cast<uint32_t>(indices.size()); }
 };
 
 // A subset of FilterOp containing operations which can be handled by
@@ -61,6 +66,16 @@
   kOther,
 };
 
+inline OverlayOp FilterOpToOverlayOp(FilterOp op) {
+  if (op == FilterOp::kIsNull) {
+    return OverlayOp::kIsNull;
+  }
+  if (op == FilterOp::kIsNotNull) {
+    return OverlayOp::kIsNotNull;
+  }
+  return OverlayOp::kOther;
+}
+
 // Contains estimates of the cost for each of method in this class per row.
 struct CostEstimatePerRow {
   uint32_t to_storage_range;
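
A short sketch exercising the new size() helpers and FilterOpToOverlayOp, assuming FilterOp is visible through the storage/types.h include added above (FilterOp values other than kIsNull and kIsNotNull all collapse to kOther):

#include "src/trace_processor/db/overlays/types.h"

namespace overlays = perfetto::trace_processor::overlays;
using perfetto::trace_processor::FilterOp;

void TaggedIndexVectors() {
  // The wrapper structs tag plain index vectors with the coordinate space they
  // live in, so table-space and storage-space indices are hard to mix up.
  overlays::TableIndexVector table_idx{{0, 3, 4}};
  overlays::StorageIndexVector storage_idx{{0, 1}};
  // table_idx.size() == 3, storage_idx.size() == 2.

  overlays::OverlayOp op = overlays::FilterOpToOverlayOp(FilterOp::kGe);
  // op == overlays::OverlayOp::kOther.
  (void)table_idx;
  (void)storage_idx;
  (void)op;
}
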
diff --git a/src/trace_processor/db/query_executor.cc b/src/trace_processor/db/query_executor.cc
new file mode 100644
index 0000000..94ff142
--- /dev/null
+++ b/src/trace_processor/db/query_executor.cc
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <cstddef>
+#include <memory>
+#include <numeric>
+#include <vector>
+
+#include "perfetto/base/logging.h"
+#include "perfetto/ext/base/status_or.h"
+#include "src/trace_processor/db/overlays/null_overlay.h"
+#include "src/trace_processor/db/overlays/storage_overlay.h"
+#include "src/trace_processor/db/query_executor.h"
+#include "src/trace_processor/db/storage/numeric_storage.h"
+#include "src/trace_processor/db/table.h"
+
+namespace perfetto {
+namespace trace_processor {
+
+namespace {
+
+using Range = RowMap::Range;
+using OverlayOp = overlays::OverlayOp;
+using StorageRange = overlays::StorageRange;
+using TableRange = overlays::TableRange;
+using Storage = storage::Storage;
+using StorageOverlay = overlays::StorageOverlay;
+using TableIndexVector = overlays::TableIndexVector;
+using StorageIndexVector = overlays::StorageIndexVector;
+using TableBitVector = overlays::TableBitVector;
+using StorageBitVector = overlays::StorageBitVector;
+using OverlaysVec = base::SmallVector<const overlays::StorageOverlay*,
+                                      QueryExecutor::kMaxOverlayCount>;
+
+// Helper struct to simplify operations on |global| and |current| sets of
+// indices. Having this coupling enables efficient implementation of
+// IndexedColumnFilter.
+struct IndexFilterHelper {
+  explicit IndexFilterHelper(std::vector<uint32_t> indices) {
+    current_ = indices;
+    global_ = std::move(indices);
+  }
+
+  // Partitions the indices into two IndexFilterHelpers: pairs whose bit in
+  // |bv| is set and pairs whose bit is not.
+  static std::pair<IndexFilterHelper, IndexFilterHelper> Partition(
+      IndexFilterHelper indices,
+      const BitVector& bv) {
+    if (bv.CountSetBits() == 0) {
+      return {IndexFilterHelper(), indices};
+    }
+
+    IndexFilterHelper set_partition;
+    IndexFilterHelper non_set_partition;
+    for (auto it = bv.IterateAllBits(); it; it.Next()) {
+      uint32_t idx = it.index();
+      if (it.IsSet()) {
+        set_partition.PushBack({indices.current_[idx], indices.global_[idx]});
+      } else {
+        non_set_partition.PushBack(
+            {indices.current_[idx], indices.global_[idx]});
+      }
+    }
+    return {set_partition, non_set_partition};
+  }
+
+  // Removes pairs of elements whose bit in |filter_nulls| is not set. Returns
+  // the count of removed elements.
+  uint32_t KeepAtSet(BitVector filter_nulls) {
+    PERFETTO_CHECK(filter_nulls.size() == current_.size() ||
+                   filter_nulls.CountSetBits() == 0);
+    uint32_t count_removed =
+        static_cast<uint32_t>(current_.size()) - filter_nulls.CountSetBits();
+
+    uint32_t i = 0;
+    auto filter = [&i, &filter_nulls](uint32_t) {
+      return !filter_nulls.IsSet(i++);
+    };
+
+    auto current_it = std::remove_if(current_.begin(), current_.end(), filter);
+    current_.erase(current_it, current_.end());
+
+    i = 0;
+    auto global_it = std::remove_if(global_.begin(), global_.end(), filter);
+    global_.erase(global_it, global_.end());
+
+    return count_removed;
+  }
+
+  std::vector<uint32_t>& current() { return current_; }
+
+  std::vector<uint32_t>& global() { return global_; }
+
+ private:
+  IndexFilterHelper() = default;
+
+  void PushBack(std::pair<uint32_t, uint32_t> cur_and_global_idx) {
+    current_.push_back(cur_and_global_idx.first);
+    global_.push_back(cur_and_global_idx.second);
+  }
+
+  std::vector<uint32_t> current_;
+  std::vector<uint32_t> global_;
+};
+}  // namespace
+
+void QueryExecutor::FilterColumn(const Constraint& c,
+                                 const SimpleColumn& col,
+                                 RowMap* rm) {
+  if (rm->empty())
+    return;
+
+  uint32_t rm_size = rm->size();
+  uint32_t rm_first = rm->Get(0);
+  uint32_t rm_last = rm->Get(rm_size - 1);
+  uint32_t range_size = rm_last - rm_first;
+  // If the number of elements in the rowmap is small or the number of elements
+  // is less than 1/10th of the range, use indexed filtering.
+  // TODO(b/283763282): use Overlay estimations.
+  if (rm->IsIndexVector() || rm_size < 1024 || rm_size * 10 < range_size) {
+    *rm = IndexSearch(c, col, rm);
+    return;
+  }
+
+  BitVector bv = LinearSearch(c, col, rm);
+  if (rm->IsRange()) {
+    // If |rm| is a range, the BitVector returned by LinearSearch perfectly
+    // captures the previously filtered results already so does not need to
+    // be intersected.
+    *rm = RowMap(std::move(bv));
+  } else if (rm->IsBitVector()) {
+    // We need to reconcile our BitVector with |rm| to ensure that we don't
+    // discard results from previous searches.
+    rm->Intersect(RowMap(std::move(bv)));
+  } else {
+    // As we check |IsIndexVector()| above and always route index vectors to
+    // IndexSearch, we should never hit this case.
+    PERFETTO_FATAL("Should not happen");
+  }
+}
+
+BitVector QueryExecutor::LinearSearch(const Constraint& c,
+                                      const SimpleColumn& col,
+                                      RowMap* rm) {
+  // TODO(b/283763282): We should align these to word boundaries.
+  TableRange table_range{Range(rm->Get(0), rm->Get(rm->size() - 1) + 1)};
+  base::SmallVector<Range, kMaxOverlayCount> overlay_bounds;
+
+  for (const auto& overlay : col.overlays) {
+    StorageRange storage_range = overlay->MapToStorageRange(table_range);
+    overlay_bounds.emplace_back(storage_range.range);
+    table_range = TableRange({storage_range.range});
+  }
+
+  // Use linear search algorithm on storage.
+  overlays::StorageBitVector filtered_storage{
+      col.storage->LinearSearch(c.op, c.value, table_range.range)};
+
+  for (uint32_t i = 0; i < col.overlays.size(); ++i) {
+    uint32_t rev_i = static_cast<uint32_t>(col.overlays.size()) - 1 - i;
+    TableBitVector mapped_to_table = col.overlays[rev_i]->MapToTableBitVector(
+        std::move(filtered_storage), overlays::FilterOpToOverlayOp(c.op));
+    filtered_storage = StorageBitVector({std::move(mapped_to_table.bv)});
+  }
+  return std::move(filtered_storage.bv);
+}
+
+RowMap QueryExecutor::IndexSearch(const Constraint& c,
+                                  const SimpleColumn& col,
+                                  RowMap* rm) {
+  // Create the outermost TableIndexVector.
+  std::vector<uint32_t> table_indices;
+  table_indices.reserve(rm->size());
+  for (auto it = rm->IterateRows(); it; it.Next()) {
+    table_indices.push_back(it.index());
+  }
+
+  // Data structures for storing data across overlays.
+  IndexFilterHelper to_filter(std::move(table_indices));
+  std::vector<uint32_t> valid;
+  uint32_t count_removed = 0;
+
+  // Fetch the list of indices that require storage lookup and deal with all
+  // of the indices that can be compared before it.
+  OverlayOp op = overlays::FilterOpToOverlayOp(c.op);
+  for (const auto& overlay : col.overlays) {
+    BitVector partition =
+        overlay->IsStorageLookupRequired(op, {to_filter.current()});
+
+    // Most overlays don't require partitioning.
+    if (partition.CountSetBits() == partition.size()) {
+      to_filter.current() =
+          overlay->MapToStorageIndexVector({to_filter.current()}).indices;
+      continue;
+    }
+
+    // Separate indices that don't require storage lookup. Those can be dealt
+    // with in each pass.
+    auto [storage_lookup, no_storage_lookup] =
+        IndexFilterHelper::Partition(to_filter, partition);
+    to_filter = storage_lookup;
+
+    // Erase the values which don't match the constraint and add the
+    // remaining ones to the result.
+    BitVector valid_bv =
+        overlay->IndexSearch(op, {no_storage_lookup.current()});
+    count_removed += no_storage_lookup.KeepAtSet(std::move(valid_bv));
+    valid.insert(valid.end(), no_storage_lookup.global().begin(),
+                 no_storage_lookup.global().end());
+
+    // Update the current indices to the next storage overlay.
+    to_filter.current() =
+        overlay->MapToStorageIndexVector({to_filter.current()}).indices;
+  }
+
+  BitVector matched_in_storage = col.storage->IndexSearch(
+      c.op, c.value, to_filter.current().data(),
+      static_cast<uint32_t>(to_filter.current().size()));
+  count_removed += to_filter.KeepAtSet(std::move(matched_in_storage));
+  valid.insert(valid.end(), to_filter.global().begin(),
+               to_filter.global().end());
+
+  PERFETTO_CHECK(rm->size() == valid.size() + count_removed);
+
+  std::sort(valid.begin(), valid.end());
+  return RowMap(std::move(valid));
+}
+
+RowMap QueryExecutor::FilterLegacy(const Table* table,
+                                   const std::vector<Constraint>& c_vec) {
+  RowMap rm(0, table->row_count());
+  for (const auto& c : c_vec) {
+    const Column& col = table->columns()[c.col_idx];
+    bool use_legacy = rm.size() == 1;
+    use_legacy = use_legacy || col.col_type() == ColumnType::kString ||
+                 col.col_type() == ColumnType::kDummy ||
+                 col.col_type() == ColumnType::kId;
+    use_legacy = use_legacy || col.type() != c.value.type;
+    use_legacy = use_legacy ||
+                 col.overlay().row_map().size() != col.storage_base().size();
+    use_legacy = use_legacy || col.IsSorted() || col.IsDense() || col.IsSetId();
+    use_legacy = use_legacy || col.overlay().row_map().IsIndexVector();
+    if (use_legacy) {
+      col.FilterInto(c.op, c.value, &rm);
+      continue;
+    }
+
+    const void* s_data = col.storage_base().data();
+    uint32_t s_size = col.storage_base().non_null_size();
+
+    storage::NumericStorage storage(s_data, s_size, col.col_type());
+    overlays::NullOverlay null_overlay(col.storage_base().bv());
+
+    SimpleColumn s_col{OverlaysVec(), &storage};
+    if (col.IsNullable()) {
+      s_col.overlays.emplace_back(&null_overlay);
+    }
+
+    uint32_t pre_count = rm.size();
+    FilterColumn(c, s_col, &rm);
+    PERFETTO_DCHECK(rm.size() <= pre_count);
+  }
+  return rm;
+}
+
+}  // namespace trace_processor
+}  // namespace perfetto
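
For clarity, the index-vs-linear decision in FilterColumn can be restated as a small standalone predicate (a sketch with illustrative numbers, not production code):

#include <cstdint>

// Small or sparse RowMaps go through IndexSearch; dense ranges and BitVectors
// go through LinearSearch over [rm_first, rm_last].
bool UseIndexSearch(bool is_index_vector, uint32_t rm_size, uint32_t rm_first,
                    uint32_t rm_last) {
  uint32_t range_size = rm_last - rm_first;
  return is_index_vector || rm_size < 1024 || rm_size * 10 < range_size;
}
// UseIndexSearch(false, 2000, 0, 100000)  -> true  (rows cover ~2% of the span)
// UseIndexSearch(false, 50000, 0, 100000) -> false (dense enough for a scan)
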
diff --git a/src/trace_processor/db/query_executor.h b/src/trace_processor/db/query_executor.h
new file mode 100644
index 0000000..2e04bb9
--- /dev/null
+++ b/src/trace_processor/db/query_executor.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef SRC_TRACE_PROCESSOR_DB_QUERY_EXECUTOR_H_
+#define SRC_TRACE_PROCESSOR_DB_QUERY_EXECUTOR_H_
+
+#include <array>
+#include <numeric>
+#include <vector>
+
+#include "perfetto/ext/base/small_vector.h"
+#include "src/trace_processor/containers/bit_vector.h"
+#include "src/trace_processor/containers/row_map.h"
+#include "src/trace_processor/db/column.h"
+#include "src/trace_processor/db/overlays/storage_overlay.h"
+#include "src/trace_processor/db/overlays/types.h"
+#include "src/trace_processor/db/storage/storage.h"
+
+namespace perfetto {
+namespace trace_processor {
+
+// Responsible for executing filtering/sorting operations on a single Table.
+// TODO(b/283763282): Introduce sorting.
+class QueryExecutor {
+ public:
+  static constexpr uint32_t kMaxOverlayCount = 8;
+
+  // Overlay-based definition of the column.
+  struct SimpleColumn {
+    base::SmallVector<const overlays::StorageOverlay*, kMaxOverlayCount>
+        overlays;
+    const storage::Storage* storage;
+  };
+
+  // |row_count| is the size of the last overlay.
+  QueryExecutor(const std::vector<SimpleColumn>& columns, uint32_t row_count)
+      : columns_(columns), row_count_(row_count) {}
+
+  // Apply all the constraints on the data and return the filtered RowMap.
+  RowMap Filter(const std::vector<Constraint>& cs) {
+    RowMap rm(0, row_count_);
+    for (const auto& c : cs) {
+      FilterColumn(c, columns_[c.col_idx], &rm);
+    }
+    return rm;
+  }
+
+  // Sorts using vector of Order.
+  // TODO(b/283763282): Implement.
+  RowMap Sort(const std::vector<Order>&) { PERFETTO_FATAL("Not implemented."); }
+
+  // Enables QueryExecutor::Filter on Table columns.
+  // TODO(b/283763282): Implement.
+  static RowMap FilterLegacy(const Table*, const std::vector<Constraint>&);
+
+  // Enables QueryExecutor::Sort on Table columns.
+  // TODO(b/283763282): Implement.
+  static RowMap SortLegacy(const Table*, const std::vector<Order>&) {
+    PERFETTO_FATAL("Not implemented.");
+  }
+
+  // Used only in unittests. Exposes private function.
+  static RowMap BoundedColumnFilterForTesting(const Constraint& c,
+                                              const SimpleColumn& col,
+                                              RowMap* rm) {
+    return RowMap(LinearSearch(c, col, rm));
+  }
+
+  // Used only in unittests. Exposes private function.
+  static RowMap IndexedColumnFilterForTesting(const Constraint& c,
+                                              const SimpleColumn& col,
+                                              RowMap* rm) {
+    return IndexSearch(c, col, rm);
+  }
+
+ private:
+  // Updates the RowMap with the result of filtering a single column using the
+  // given Constraint.
+  static void FilterColumn(const Constraint&, const SimpleColumn&, RowMap*);
+
+  // Filters the column using the Range algorithm: finds the smallest Range of
+  // indices to filter the storage with.
+  static BitVector LinearSearch(const Constraint&,
+                                const SimpleColumn&,
+                                RowMap*);
+
+  // Filters the column using the Index algorithm: finds the indices to filter
+  // the storage with.
+  static RowMap IndexSearch(const Constraint&, const SimpleColumn&, RowMap*);
+
+  std::vector<SimpleColumn> columns_;
+
+  // Number of rows in the outermost overlay.
+  uint32_t row_count_ = 0;
+};
+
+}  // namespace trace_processor
+}  // namespace perfetto
+
+#endif  // SRC_TRACE_PROCESSOR_DB_QUERY_EXECUTOR_H_
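
A minimal usage sketch of the new interface on a single overlay-free column; it mirrors the OnlyStorageRange unit test added below and assumes NumericStorage from src/trace_processor/db/storage/:

#include <cstdint>
#include <vector>

#include "src/trace_processor/db/query_executor.h"
#include "src/trace_processor/db/storage/numeric_storage.h"

namespace perfetto {
namespace trace_processor {

RowMap FilterGreaterEqualThree() {
  std::vector<int64_t> data{1, 2, 3, 4, 5};
  storage::NumericStorage storage(data.data(), 5, ColumnType::kInt64);

  // No overlays: the column is just the raw storage.
  QueryExecutor::SimpleColumn col{{}, &storage};
  QueryExecutor executor({col}, /*row_count=*/5);

  Constraint c{/*col_idx=*/0, FilterOp::kGe, SqlValue::Long(3)};
  return executor.Filter({c});  // keeps rows 2, 3 and 4
}

}  // namespace trace_processor
}  // namespace perfetto
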
diff --git a/src/trace_processor/db/query_executor_unittest.cc b/src/trace_processor/db/query_executor_unittest.cc
new file mode 100644
index 0000000..01fff7a
--- /dev/null
+++ b/src/trace_processor/db/query_executor_unittest.cc
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "src/trace_processor/db/query_executor.h"
+#include "src/trace_processor/db/overlays/null_overlay.h"
+#include "src/trace_processor/db/overlays/selector_overlay.h"
+#include "src/trace_processor/db/storage/numeric_storage.h"
+#include "test/gtest_and_gmock.h"
+
+namespace perfetto {
+namespace trace_processor {
+namespace {
+
+using OverlaysVec = base::SmallVector<const overlays::StorageOverlay*,
+                                      QueryExecutor::kMaxOverlayCount>;
+using NumericStorage = storage::NumericStorage;
+using SimpleColumn = QueryExecutor::SimpleColumn;
+using NullOverlay = overlays::NullOverlay;
+using SelectorOverlay = overlays::SelectorOverlay;
+
+TEST(QueryExecutor, OnlyStorageRange) {
+  std::vector<int64_t> storage_data{1, 2, 3, 4, 5};
+  NumericStorage storage(storage_data.data(), 5, ColumnType::kInt64);
+  SimpleColumn col{OverlaysVec(), &storage};
+
+  Constraint c{0, FilterOp::kGe, SqlValue::Long(3)};
+  RowMap rm(0, 5);
+  RowMap res = QueryExecutor::BoundedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 3u);
+  ASSERT_EQ(res.Get(0), 2u);
+}
+
+TEST(QueryExecutor, OnlyStorageRangeIsNull) {
+  std::vector<int64_t> storage_data{1, 2, 3, 4, 5};
+  NumericStorage storage(storage_data.data(), 5, ColumnType::kInt64);
+  SimpleColumn col{OverlaysVec(), &storage};
+
+  Constraint c{0, FilterOp::kIsNull, SqlValue::Long(3)};
+  RowMap rm(0, 5);
+  RowMap res = QueryExecutor::BoundedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 0u);
+}
+
+TEST(QueryExecutor, OnlyStorageIndex) {
+  // Setup storage
+  std::vector<int64_t> storage_data(10);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  std::transform(storage_data.begin(), storage_data.end(), storage_data.begin(),
+                 [](int64_t n) { return n % 5; });
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+
+  SimpleColumn col{OverlaysVec(), &storage};
+  Constraint c{0, FilterOp::kLt, SqlValue::Long(2)};
+  RowMap rm(0, 10);
+  RowMap res = QueryExecutor::IndexedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 4u);
+  ASSERT_EQ(res.Get(0), 0u);
+  ASSERT_EQ(res.Get(1), 1u);
+  ASSERT_EQ(res.Get(2), 5u);
+  ASSERT_EQ(res.Get(3), 6u);
+}
+
+TEST(QueryExecutor, OnlyStorageIndexIsNull) {
+  std::vector<int64_t> storage_data{1, 2, 3, 4, 5};
+  NumericStorage storage(storage_data.data(), 5, ColumnType::kInt64);
+  SimpleColumn col{OverlaysVec(), &storage};
+
+  Constraint c{0, FilterOp::kIsNull, SqlValue::Long(3)};
+  RowMap rm(0, 5);
+  RowMap res = QueryExecutor::IndexedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 0u);
+}
+
+TEST(QueryExecutor, NullOverlayBounds) {
+  std::vector<int64_t> storage_data(5);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+  BitVector bv{1, 1, 0, 1, 1, 0, 0, 0, 1, 0};
+  overlays::NullOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kGe, SqlValue::Long(3)};
+  RowMap rm(0, 10);
+  RowMap res = QueryExecutor::BoundedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 2u);
+  ASSERT_EQ(res.Get(0), 4u);
+  ASSERT_EQ(res.Get(1), 8u);
+}
+
+TEST(QueryExecutor, NullOverlayRangeIsNull) {
+  std::vector<int64_t> storage_data(5);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+  BitVector bv{1, 1, 0, 1, 1, 0, 0, 0, 1, 0};
+  overlays::NullOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kIsNull, SqlValue::Long(3)};
+  RowMap rm(0, 10);
+  RowMap res = QueryExecutor::BoundedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 5u);
+  ASSERT_EQ(res.Get(0), 2u);
+  ASSERT_EQ(res.Get(1), 5u);
+  ASSERT_EQ(res.Get(2), 6u);
+  ASSERT_EQ(res.Get(3), 7u);
+  ASSERT_EQ(res.Get(4), 9u);
+}
+
+TEST(QueryExecutor, NullOverlayIndex) {
+  std::vector<int64_t> storage_data(6);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  std::transform(storage_data.begin(), storage_data.end(), storage_data.begin(),
+                 [](int64_t n) { return n % 3; });
+  NumericStorage storage(storage_data.data(), 6, ColumnType::kInt64);
+
+  BitVector bv{1, 1, 0, 1, 1, 0, 1, 0, 0, 1};
+  NullOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kGe, SqlValue::Long(1)};
+  RowMap rm(0, 10);
+  RowMap res = QueryExecutor::IndexedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 4u);
+  ASSERT_EQ(res.Get(0), 1u);
+  ASSERT_EQ(res.Get(1), 3u);
+  ASSERT_EQ(res.Get(2), 6u);
+  ASSERT_EQ(res.Get(3), 9u);
+}
+
+TEST(QueryExecutor, NullOverlayIndexIsNull) {
+  std::vector<int64_t> storage_data(5);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+  BitVector bv{1, 1, 0, 1, 1, 0, 0, 0, 1, 0};
+  overlays::NullOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kIsNull, SqlValue::Long(3)};
+  RowMap rm(0, 10);
+  RowMap res = QueryExecutor::IndexedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 5u);
+  ASSERT_EQ(res.Get(0), 2u);
+  ASSERT_EQ(res.Get(1), 5u);
+  ASSERT_EQ(res.Get(2), 6u);
+  ASSERT_EQ(res.Get(3), 7u);
+  ASSERT_EQ(res.Get(4), 9u);
+}
+
+TEST(QueryExecutor, SelectorOverlayBounds) {
+  std::vector<int64_t> storage_data(5);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  NumericStorage storage(storage_data.data(), 5, ColumnType::kInt64);
+
+  BitVector bv{1, 1, 0, 0, 1};
+  SelectorOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kGt, SqlValue::Long(1)};
+  RowMap rm(0, 3);
+  RowMap res = QueryExecutor::BoundedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 1u);
+  ASSERT_EQ(res.Get(0), 2u);
+}
+
+TEST(QueryExecutor, SelectorOverlayIndex) {
+  std::vector<int64_t> storage_data(10);
+  std::iota(storage_data.begin(), storage_data.end(), 0);
+  std::transform(storage_data.begin(), storage_data.end(), storage_data.begin(),
+                 [](int64_t n) { return n % 5; });
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+
+  BitVector bv{1, 1, 0, 1, 1, 0, 1, 0, 0, 1};
+  SelectorOverlay overlay(&bv);
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&overlay);
+
+  SimpleColumn col{overlays_vec, &storage};
+
+  Constraint c{0, FilterOp::kGe, SqlValue::Long(2)};
+  RowMap rm(0, 6);
+  RowMap res = QueryExecutor::IndexedColumnFilterForTesting(c, col, &rm);
+
+  ASSERT_EQ(res.size(), 3u);
+  ASSERT_EQ(res.Get(0), 2u);
+  ASSERT_EQ(res.Get(1), 3u);
+  ASSERT_EQ(res.Get(2), 5u);
+}
+
+TEST(QueryExecutor, SingleConstraintWithNullAndSelector) {
+  std::vector<int64_t> storage_data{0, 1, 2, 3, 4, 0, 1, 2, 3, 4};
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+
+  // Select 6 elements from storage, resulting in a vector {0, 1, 3, 4, 1, 2}.
+  BitVector selector_bv{1, 1, 0, 1, 1, 0, 1, 1, 0, 0};
+  SelectorOverlay selector_overlay(&selector_bv);
+
+  // Add nulls, final vector {0, 1, NULL, 3, 4, NULL, 1, 2, NULL}.
+  BitVector null_bv{1, 1, 0, 1, 1, 0, 1, 1, 0};
+  NullOverlay null_overlay(&null_bv);
+
+  // Create the column.
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&null_overlay);
+  overlays_vec.emplace_back(&selector_overlay);
+  SimpleColumn col{overlays_vec, &storage};
+
+  // Filter.
+  Constraint c{0, FilterOp::kGe, SqlValue::Long(2)};
+  QueryExecutor exec({col}, 9);
+  RowMap res = exec.Filter({c});
+
+  ASSERT_EQ(res.size(), 3u);
+  ASSERT_EQ(res.Get(0), 3u);
+  ASSERT_EQ(res.Get(1), 4u);
+  ASSERT_EQ(res.Get(2), 7u);
+}
+
+TEST(QueryExecutor, IsNull) {
+  std::vector<int64_t> storage_data{0, 1, 2, 3, 4, 0, 1, 2, 3, 4};
+  NumericStorage storage(storage_data.data(), 10, ColumnType::kInt64);
+
+  // Select 6 elements from storage, resulting in a vector {0, 1, 3, 4, 1, 2}.
+  BitVector selector_bv{1, 1, 0, 1, 1, 0, 1, 1, 0, 0};
+  SelectorOverlay selector_overlay(&selector_bv);
+
+  // Add nulls, final vector {0, 1, NULL, 3, 4, NULL, 1, 2, NULL}.
+  BitVector null_bv{1, 1, 0, 1, 1, 0, 1, 1, 0};
+  NullOverlay null_overlay(&null_bv);
+
+  // Create the column.
+  OverlaysVec overlays_vec;
+  overlays_vec.emplace_back(&null_overlay);
+  overlays_vec.emplace_back(&selector_overlay);
+  SimpleColumn col{overlays_vec, &storage};
+
+  // Filter.
+  Constraint c{0, FilterOp::kIsNull, SqlValue::Long(0)};
+  QueryExecutor exec({col}, 9);
+  RowMap res = exec.Filter({c});
+
+  ASSERT_EQ(res.size(), 3u);
+  ASSERT_EQ(res.Get(0), 2u);
+  ASSERT_EQ(res.Get(1), 5u);
+  ASSERT_EQ(res.Get(2), 8u);
+}
+
+}  // namespace
+}  // namespace trace_processor
+}  // namespace perfetto
diff --git a/src/trace_processor/db/storage/numeric_storage.cc b/src/trace_processor/db/storage/numeric_storage.cc
index eb1dd08..313ae8b 100644
--- a/src/trace_processor/db/storage/numeric_storage.cc
+++ b/src/trace_processor/db/storage/numeric_storage.cc
@@ -180,7 +180,7 @@
     uint64_t word = 0;
     // This part should be optimised by SIMD and is expected to be fast.
     for (uint32_t k = 0; k < BitVector::kBitsInWord; ++k) {
-      bool comp_result = comparator(start[i + k], typed_val);
+      bool comp_result = comparator(ptr[i + k], typed_val);
       word |= static_cast<uint64_t>(comp_result) << k;
     }
     builder.AppendWord(word);
@@ -194,20 +194,47 @@
   }
 }
 
+template <typename T>
+void TypedLinearSearch(T typed_val,
+                       const T* start,
+                       FilterOp op,
+                       BitVector::Builder& builder) {
+  switch (op) {
+    case FilterOp::kEq:
+      return TypedLinearSearch(typed_val, start, std::equal_to<T>(), builder);
+    case FilterOp::kNe:
+      return TypedLinearSearch(typed_val, start, std::not_equal_to<T>(),
+                               builder);
+    case FilterOp::kLe:
+      return TypedLinearSearch(typed_val, start, std::less_equal<T>(), builder);
+    case FilterOp::kLt:
+      return TypedLinearSearch(typed_val, start, std::less<T>(), builder);
+    case FilterOp::kGt:
+      return TypedLinearSearch(typed_val, start, std::greater<T>(), builder);
+    case FilterOp::kGe:
+      return TypedLinearSearch(typed_val, start, std::greater_equal<T>(),
+                               builder);
+    case FilterOp::kGlob:
+    case FilterOp::kIsNotNull:
+    case FilterOp::kIsNull:
+      PERFETTO_DFATAL("Illegal argument");
+  }
+}
+
 template <typename T, typename Comparator>
 void TypedIndexSearch(T typed_val,
-                      const T* start,
-                      uint32_t* indices,
+                      const T* ptr,
+                      uint32_t* indices_start,
                       Comparator comparator,
                       BitVector::Builder& builder) {
   // Slow path: we compare <64 elements and append to get us to a word
   // boundary.
-  const T* ptr = start;
+  const uint32_t* indices = indices_start;
   uint32_t front_elements = builder.BitsUntilWordBoundaryOrFull();
   for (uint32_t i = 0; i < front_elements; ++i) {
     builder.Append(comparator(ptr[indices[i]], typed_val));
   }
-  ptr += front_elements;
+  indices += front_elements;
 
   // Fast path: we compare as many groups of 64 elements as we can.
   // This should be very easy for the compiler to auto-vectorize.
@@ -216,12 +243,12 @@
     uint64_t word = 0;
     // This part should be optimised by SIMD and is expected to be fast.
     for (uint32_t k = 0; k < BitVector::kBitsInWord; ++k) {
-      bool comp_result = comparator(start[indices[i + k]], typed_val);
+      bool comp_result = comparator(ptr[indices[i + k]], typed_val);
       word |= static_cast<uint64_t>(comp_result) << k;
     }
     builder.AppendWord(word);
   }
-  ptr += fast_path_elements;
+  indices += fast_path_elements;
 
   // Slow path: we compare <64 elements and append to fill the Builder.
   uint32_t back_elements = builder.BitsUntilFull();
@@ -240,21 +267,25 @@
     return BitVector(size(), true);
 
   if (!val.has_value() || op == FilterOp::kIsNull || op == FilterOp::kGlob)
-    return BitVector();
+    return BitVector(size(), false);
 
   BitVector::Builder builder(range.end);
   builder.Skip(range.start);
-  std::visit(
-      [this, range, op, &builder](auto val) {
-        using T = decltype(val);
-        auto* start = static_cast<const T*>(data_) + range.start;
-        std::visit(
-            [start, val, &builder](auto comparator) {
-              TypedLinearSearch(val, start, comparator, builder);
-            },
-            GetFilterOpVariant<T>(op));
-      },
-      *val);
+  if (const auto* u32 = std::get_if<uint32_t>(&*val)) {
+    auto* start = static_cast<const uint32_t*>(data_) + range.start;
+    TypedLinearSearch(*u32, start, op, builder);
+  } else if (const auto* i64 = std::get_if<int64_t>(&*val)) {
+    auto* start = static_cast<const int64_t*>(data_) + range.start;
+    TypedLinearSearch(*i64, start, op, builder);
+  } else if (const auto* i32 = std::get_if<int32_t>(&*val)) {
+    auto* start = static_cast<const int32_t*>(data_) + range.start;
+    TypedLinearSearch(*i32, start, op, builder);
+  } else if (const auto* db = std::get_if<double>(&*val)) {
+    auto* start = static_cast<const double*>(data_) + range.start;
+    TypedLinearSearch(*db, start, op, builder);
+  } else {
+    PERFETTO_DFATAL("Invalid");
+  }
   return std::move(builder).Build();
 }
 
@@ -267,7 +298,7 @@
     return BitVector(size(), true);
 
   if (!val.has_value() || op == FilterOp::kIsNull || op == FilterOp::kGlob)
-    return BitVector();
+    return BitVector(size(), false);
 
   BitVector::Builder builder(indices_count);
   std::visit(
diff --git a/src/trace_processor/db/storage/numeric_storage.h b/src/trace_processor/db/storage/numeric_storage.h
index ab802de..cea44d9 100644
--- a/src/trace_processor/db/storage/numeric_storage.h
+++ b/src/trace_processor/db/storage/numeric_storage.h
@@ -26,9 +26,9 @@
 namespace storage {
 
 // Storage for all numeric type data (i.e. doubles, int32, int64, uint32).
-class NumericStorage : public Storage {
+class NumericStorage final : public Storage {
  public:
-  NumericStorage(void* data, uint32_t size, ColumnType type)
+  NumericStorage(const void* data, uint32_t size, ColumnType type)
       : type_(type), data_(data), size_(size) {}
 
   void StableSort(uint32_t* rows, uint32_t rows_size) const override;
diff --git a/src/trace_processor/db/table.cc b/src/trace_processor/db/table.cc
index 4346377..a4518ef 100644
--- a/src/trace_processor/db/table.cc
+++ b/src/trace_processor/db/table.cc
@@ -19,6 +19,8 @@
 namespace perfetto {
 namespace trace_processor {
 
+bool Table::kUseFilterV2 = false;
+
 Table::Table() = default;
 Table::~Table() = default;
 
diff --git a/src/trace_processor/db/table.h b/src/trace_processor/db/table.h
index c1cf66c..652799e 100644
--- a/src/trace_processor/db/table.h
+++ b/src/trace_processor/db/table.h
@@ -25,9 +25,11 @@
 #include <vector>
 
 #include "perfetto/base/logging.h"
+#include "src/trace_processor/containers/row_map.h"
 #include "src/trace_processor/containers/string_pool.h"
 #include "src/trace_processor/db/column.h"
 #include "src/trace_processor/db/column_storage_overlay.h"
+#include "src/trace_processor/db/query_executor.h"
 #include "src/trace_processor/db/typed_column.h"
 
 namespace perfetto {
@@ -91,6 +93,8 @@
     std::vector<Column> columns;
   };
 
+  static bool kUseFilterV2;
+
   Table();
   virtual ~Table();
 
@@ -115,6 +119,13 @@
   RowMap FilterToRowMap(
       const std::vector<Constraint>& cs,
       RowMap::OptimizeFor optimize_for = RowMap::OptimizeFor::kMemory) const {
+    if (kUseFilterV2) {
+      if (optimize_for == RowMap::OptimizeFor::kMemory) {
+        return QueryExecutor::FilterLegacy(this, cs);
+      }
+      return RowMap(QueryExecutor::FilterLegacy(this, cs).TakeAsIndexVector());
+    }
+
     RowMap rm(0, row_count_, optimize_for);
     for (const Constraint& c : cs) {
       columns_[c.col_idx].FilterInto(c.op, c.value, &rm);
diff --git a/src/trace_processor/importers/ftrace/thread_state_tracker.cc b/src/trace_processor/importers/ftrace/thread_state_tracker.cc
index a80d6a4..d7a83d8 100644
--- a/src/trace_processor/importers/ftrace/thread_state_tracker.cc
+++ b/src/trace_processor/importers/ftrace/thread_state_tracker.cc
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 #include "src/trace_processor/importers/ftrace/thread_state_tracker.h"
+#include <optional>
 
 namespace perfetto {
 namespace trace_processor {
@@ -67,9 +68,13 @@
     // in the |thread_state| table but we track in the |sched_wakeup| table.
     // The |thread_state_id| in |sched_wakeup| is the current running/runnable
     // event.
+    std::optional<uint32_t> irq_context =
+        common_flags
+            ? std::make_optional(CommonFlagsToIrqContext(*common_flags))
+            : std::nullopt;
     storage_->mutable_spurious_sched_wakeup_table()->Insert(
         {event_ts, prev_row_numbers_for_thread_[utid]->last_row.row_number(),
-         CommonFlagsToIrqContext(*common_flags), utid, waker_utid});
+         irq_context, utid, waker_utid});
     return;
   }
 
@@ -80,8 +85,8 @@
 }
 
 void ThreadStateTracker::PushNewTaskEvent(int64_t event_ts,
-                                         UniqueTid utid,
-                                         UniqueTid waker_utid) {
+                                          UniqueTid utid,
+                                          UniqueTid waker_utid) {
   AddOpenState(event_ts, utid, runnable_string_id_, std::nullopt, waker_utid);
 }
 
diff --git a/src/trace_processor/metrics/sql/android/jank/internal/query_frame_slice.sql b/src/trace_processor/metrics/sql/android/jank/internal/query_frame_slice.sql
index 0b50e4a..bebff9b 100644
--- a/src/trace_processor/metrics/sql/android/jank/internal/query_frame_slice.sql
+++ b/src/trace_processor/metrics/sql/android/jank/internal/query_frame_slice.sql
@@ -61,7 +61,7 @@
 -- `relevant_slice_table_name` is not correct (e.g. missing cuj_id).
 DROP TABLE IF EXISTS {{table_name_prefix}}_query_slice;
 CREATE TABLE {{table_name_prefix}}_query_slice AS
-SELECT
+SELECT DISTINCT
   slice.cuj_id,
   slice.utid,
   slice.id,
diff --git a/src/trace_processor/metrics/sql/android/network_activity_template.sql b/src/trace_processor/metrics/sql/android/network_activity_template.sql
index aa163c2..3e83f87 100644
--- a/src/trace_processor/metrics/sql/android/network_activity_template.sql
+++ b/src/trace_processor/metrics/sql/android/network_activity_template.sql
@@ -50,7 +50,7 @@
 with_last AS (
   SELECT
     *,
-    LAG(ts) OVER (
+    LAG(ts+dur) OVER (
       PARTITION BY {{group_by}}
       ORDER BY ts
     ) AS last_ts
diff --git a/src/trace_processor/prelude/functions/create_function.cc b/src/trace_processor/prelude/functions/create_function.cc
index 9ca6ba8..51ae5f6 100644
--- a/src/trace_processor/prelude/functions/create_function.cc
+++ b/src/trace_processor/prelude/functions/create_function.cc
@@ -16,6 +16,9 @@
 
 #include "src/trace_processor/prelude/functions/create_function.h"
 
+#include <queue>
+#include <stack>
+
 #include "perfetto/base/status.h"
 #include "perfetto/trace_processor/basic_types.h"
 #include "src/trace_processor/prelude/functions/create_function_internal.h"
@@ -31,14 +34,89 @@
 
 namespace {
 
+base::StatusOr<ScopedStmt> CreateStatement(PerfettoSqlEngine* engine,
+                                           const std::string& sql,
+                                           const std::string& prototype) {
+  ScopedStmt stmt;
+  const char* tail = nullptr;
+  base::Status status = sqlite_utils::PrepareStmt(engine->sqlite_engine()->db(),
+                                                  sql.c_str(), &stmt, &tail);
+  if (!status.ok()) {
+    return base::ErrStatus(
+        "CREATE_FUNCTION[prototype=%s]: SQLite error when preparing "
+        "statement %s",
+        prototype.c_str(), status.message().c_str());
+  }
+  return std::move(stmt);
+}
+
+base::Status CheckNoMoreRows(sqlite3_stmt* stmt,
+                             sqlite3* db,
+                             const Prototype& prototype) {
+  int ret = sqlite3_step(stmt);
+  RETURN_IF_ERROR(SqliteRetToStatus(db, prototype.function_name, ret));
+  if (ret == SQLITE_ROW) {
+    auto expanded_sql = sqlite_utils::ExpandedSqlForStmt(stmt);
+    return base::ErrStatus(
+        "%s: multiple values were returned when executing function body. "
+        "Executed SQL was %s",
+        prototype.function_name.c_str(), expanded_sql.get());
+  }
+  PERFETTO_DCHECK(ret == SQLITE_DONE);
+  return base::OkStatus();
+}
+
+// Note: if the returned type is string / bytes, it will be invalidated by the
+// next call to SQLite, so the caller must take care to either copy or use the
+// value before calling SQLite again.
+base::StatusOr<SqlValue> EvaluateScalarStatement(sqlite3_stmt* stmt,
+                                                 sqlite3* db,
+                                                 const Prototype& prototype) {
+  int ret = sqlite3_step(stmt);
+  RETURN_IF_ERROR(SqliteRetToStatus(db, prototype.function_name, ret));
+  if (ret == SQLITE_DONE) {
+    // No return value means we just return a null SqlValue.
+    return SqlValue();
+  }
+
+  PERFETTO_DCHECK(ret == SQLITE_ROW);
+  size_t col_count = static_cast<size_t>(sqlite3_column_count(stmt));
+  if (col_count != 1) {
+    return base::ErrStatus(
+        "%s: SQL definition should only return one column: returned %zu "
+        "columns",
+        prototype.function_name.c_str(), col_count);
+  }
+
+  SqlValue result =
+      sqlite_utils::SqliteValueToSqlValue(sqlite3_column_value(stmt, 0));
+
+  // If we return a bytes type but have a null pointer, SQLite will convert this
+  // to an SQL null. However, for proto build functions, we actively want to
+  // distinguish between nulls and 0 byte strings. Therefore, change the value
+  // to an empty string.
+  if (result.type == SqlValue::kBytes && result.bytes_value == nullptr) {
+    PERFETTO_DCHECK(result.bytes_count == 0);
+    result.bytes_value = "";
+  }
+
+  return result;
+}
+
+base::Status BindArguments(sqlite3_stmt* stmt,
+                           const Prototype& prototype,
+                           size_t argc,
+                           sqlite3_value** argv) {
+  // Bind all the arguments to the appropriate places in the function.
+  for (size_t i = 0; i < argc; ++i) {
+    RETURN_IF_ERROR(MaybeBindArgument(stmt, prototype.function_name,
+                                      prototype.arguments[i], argv[i]));
+  }
+  return base::OkStatus();
+}
+
 struct CreatedFunction : public SqlFunction {
-  struct Context {
-    PerfettoSqlEngine* engine;
-    Prototype prototype;
-    sql_argument::Type return_type;
-    std::string sql;
-    ScopedStmt stmt;
-  };
+  class Context;
 
   static base::Status Run(Context* ctx,
                           size_t argc,
@@ -49,37 +127,495 @@
   static void Cleanup(Context*);
 };
 
+class Memoizer {
+ public:
+  // Supported arguments. For now, only functions with a single int argument are
+  // supported.
+  using MemoizedArgs = int64_t;
+
+  // Enables memoization.
+  // Only functions with a single int argument returning ints are supported.
+  base::Status EnableMemoization(const Prototype& prototype,
+                                 sql_argument::Type return_type) {
+    if (prototype.arguments.size() != 1 ||
+        TypeToSqlValueType(prototype.arguments[0].type()) !=
+            SqlValue::Type::kLong) {
+      return base::ErrStatus(
+          "EXPERIMENTAL_MEMOIZE: Function %s should take one int argument",
+          prototype.function_name.c_str());
+    }
+    if (TypeToSqlValueType(return_type) != SqlValue::Type::kLong) {
+      return base::ErrStatus(
+          "EXPERIMENTAL_MEMOIZE: Function %s should return an int",
+          prototype.function_name.c_str());
+    }
+    enabled_ = true;
+    return base::OkStatus();
+  }
+
+  // Returns the memoized value for the current invocation if it exists.
+  std::optional<SqlValue> GetMemoizedValue(MemoizedArgs args) {
+    if (!enabled_) {
+      return std::nullopt;
+    }
+    int64_t* value = memoized_values_.Find(args);
+    if (!value) {
+      return std::nullopt;
+    }
+    return SqlValue::Long(*value);
+  }
+
+  bool HasMemoizedValue(MemoizedArgs args) {
+    return GetMemoizedValue(args).has_value();
+  }
+
+  // Saves the return value of the current invocation for memoization.
+  void Memoize(MemoizedArgs args, SqlValue value) {
+    if (!enabled_ || !IsSupportedReturnType(value)) {
+      return;
+    }
+    memoized_values_.Insert(args, value.AsLong());
+  }
+
+  static bool IsSupportedReturnType(const SqlValue& value) {
+    return value.type == SqlValue::Type::kLong;
+  }
+
+  // Checks that the function has a single int argument and returns it.
+  static std::optional<MemoizedArgs> AsMemoizedArgs(size_t argc,
+                                                    sqlite3_value** argv) {
+    if (argc != 1) {
+      return std::nullopt;
+    }
+    SqlValue arg = sqlite_utils::SqliteValueToSqlValue(argv[0]);
+    if (arg.type != SqlValue::Type::kLong) {
+      return std::nullopt;
+    }
+    return arg.AsLong();
+  }
+
+  bool enabled() const { return enabled_; }
+
+ private:
+  bool enabled_ = false;
+  base::FlatHashMap<MemoizedArgs, int64_t> memoized_values_;
+};
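+
+// Illustrative sketch (not part of the implementation) of how a function
+// context might use Memoizer; |prototype|, |return_type| and |arg| are
+// placeholders for values coming from the CREATE_FUNCTION registration and
+// the current invocation:
+//   Memoizer memoizer;
+//   RETURN_IF_ERROR(memoizer.EnableMemoization(prototype, return_type));
+//   if (std::optional<SqlValue> cached = memoizer.GetMemoizedValue(arg)) {
+//     out = *cached;  // Reuse the previously computed value.
+//   } else {
+//     // ... evaluate the function body into |out|, then:
+//     memoizer.Memoize(arg, out);
+//   }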
+
+// A helper to unroll recursive calls: to minimise the amount of stack space
+// used, memoized recursive calls are evaluated using an on-heap queue.
+//
+// We compute the function in two passes:
+// - In the first pass, we evaluate the statement to discover which recursive
+//   calls it makes, returning null from recursive calls and ignoring the
+//   result.
+// - In the second pass, we evaluate the statement again, but this time we
+//   memoize the result of each recursive call.
+//
+// We maintain a queue for scheduled "first pass" calls and a stack for the
+// scheduled "second pass" calls, evaluating available first pass calls, then
+// second pass calls. When we evaluate a first pass call, any further calls to
+// CreatedFunction::Run just add their arguments to the "first pass" queue. The
+// second pass, however, evaluates the function normally, typically just using
+// the memoized results for the dependent calls. If the recursive calls depend
+// on the return value of the function, we proceed with normal recursion.
+//
+// To make it more concrete, consider the following example.
+// We have a function computing factorial (f) and we want to compute f(3).
+//
+// SELECT create_function('f(x INT)', 'INT',
+// 'SELECT IIF($x = 0, 1, $x * f($x - 1))');
+// SELECT experimental_memoize('f');
+// SELECT f(3);
+//
+// - We start with a call to f(3). It executes the statement as normal, which
+//   recursively calls f(2).
+// - When f(2) is called, we detect that it is a recursive call and we start
+//   unrolling it, entering RecursiveCallUnroller::Run.
+// - We schedule first pass for 2 and the state of the unroller
+//   is first_pass: [2], second_pass: [].
+// - Then we compute the first pass for f(2). It calls f(1), which is ignored
+//   due to OnFunctionCall returning kIgnoreDueToFirstPass, and 1 is added to
+//   the first pass queue. 2 is taken out of the first pass queue and moved to
+//   the second pass stack. State: first_pass: [1], second_pass: [2].
+// - Then we compute the first pass for 1. A similar thing happens: f(0) is
+//   called and ignored, 0 is added to first_pass, 1 is added to second_pass.
+//   State: first_pass: [0], second_pass: [2, 1].
+// - Then we compute the first pass for 0. It doesn't make further calls, so
+//   0 is moved to the second pass stack.
+//   State: first_pass: [], second_pass: [2, 1, 0].
+// - Then we compute the second pass for 0. It just returns 1.
+//   State: first_pass: [], second_pass: [2, 1], results: {0: 1}.
+// - Then we compute the second pass for 1. It calls f(0), which is memoized.
+//   State: first_pass: [], second_pass: [2], results: {0: 1, 1: 1}.
+// - Then we compute the second pass for 2. It calls f(1), which is memoized.
+//   State: first_pass: [], second_pass: [], results: {0: 1, 1: 1, 2: 2}.
+// - As both first_pass and second_pass are empty, we return from
+//   RecursiveCallUnroller::Run.
+// - Control is returned to CreatedFunction::Run for f(2), which returns
+//   the memoized value.
+// - Then control is returned to CreatedFunction::Run for f(3), which completes
+//   the computation.
+class RecursiveCallUnroller {
+ public:
+  RecursiveCallUnroller(PerfettoSqlEngine* engine,
+                        sqlite3_stmt* stmt,
+                        const Prototype& prototype,
+                        Memoizer& memoizer)
+      : engine_(engine),
+        stmt_(stmt),
+        prototype_(prototype),
+        memoizer_(memoizer) {}
+
+  // Whether we should just return null due to us being in the "first pass".
+  enum class FunctionCallState {
+    kIgnoreDueToFirstPass,
+    kEvaluate,
+  };
+
+  base::StatusOr<FunctionCallState> OnFunctionCall(
+      Memoizer::MemoizedArgs args) {
+    // If we are in the second pass, we just continue the function execution,
+    // including checking if a memoized value is available and returning it.
+    //
+    // We generally expect a memoized value to be available, but there are
+    // cases when it might not be, e.g. when the set of recursive calls made
+    // depends on the return value of the function. For example, for the
+    // following function the first pass will not detect the f(y) calls, so
+    // they will be computed recursively:
+    // f(x): SELECT max(f(y)) FROM y WHERE y < f($x - 1);
+    if (state_ == State::kComputingSecondPass) {
+      return FunctionCallState::kEvaluate;
+    }
+    if (!memoizer_.HasMemoizedValue(args)) {
+      ArgState* state = visited_.Find(args);
+      if (state) {
+        // Detect recursive loops, e.g. f(1) calling f(2) calling f(1).
+        if (*state == ArgState::kEvaluating) {
+          return base::ErrStatus("Infinite recursion detected");
+        }
+      } else {
+        visited_.Insert(args, ArgState::kScheduled);
+        first_pass_.push(args);
+      }
+    }
+    return FunctionCallState::kIgnoreDueToFirstPass;
+  }
+
+  base::Status Run(Memoizer::MemoizedArgs initial_args) {
+    PERFETTO_TP_TRACE(metatrace::Category::FUNCTION,
+                      "UNROLL_RECURSIVE_FUNCTION_CALL",
+                      [&](metatrace::Record* r) {
+                        r->AddArg("Function", prototype_.function_name);
+                        r->AddArg("Arg 0", std::to_string(initial_args));
+                      });
+
+    first_pass_.push(initial_args);
+    visited_.Insert(initial_args, ArgState::kScheduled);
+
+    while (!first_pass_.empty() || !second_pass_.empty()) {
+      // If we have scheduled first pass calls, we evaluate them first.
+      if (!first_pass_.empty()) {
+        state_ = State::kComputingFirstPass;
+        Memoizer::MemoizedArgs args = first_pass_.front();
+
+        PERFETTO_TP_TRACE(metatrace::Category::FUNCTION, "SQL_FUNCTION_CALL",
+                          [&](metatrace::Record* r) {
+                            r->AddArg("Function", prototype_.function_name);
+                            r->AddArg("Type", "UnrollRecursiveCall_FirstPass");
+                            r->AddArg("Arg 0", std::to_string(args));
+                          });
+
+        first_pass_.pop();
+        second_pass_.push(args);
+        Evaluate(args).status();
+        continue;
+      }
+
+      state_ = State::kComputingSecondPass;
+      Memoizer::MemoizedArgs args = second_pass_.top();
+
+      PERFETTO_TP_TRACE(metatrace::Category::FUNCTION, "SQL_FUNCTION_CALL",
+                        [&](metatrace::Record* r) {
+                          r->AddArg("Function", prototype_.function_name);
+                          r->AddArg("Type", "UnrollRecursiveCall_SecondPass");
+                          r->AddArg("Arg 0", std::to_string(args));
+                        });
+
+      visited_.Insert(args, ArgState::kEvaluating);
+      second_pass_.pop();
+      base::StatusOr<std::optional<int64_t>> result = Evaluate(args);
+      RETURN_IF_ERROR(result.status());
+      std::optional<int64_t> maybe_int_result = result.value();
+      if (!maybe_int_result.has_value()) {
+        continue;
+      }
+      visited_.Insert(args, ArgState::kEvaluated);
+      memoizer_.Memoize(args, SqlValue::Long(*maybe_int_result));
+    }
+    return base::OkStatus();
+  }
+
+ private:
+  // This function returns:
+  // - base::ErrStatus if the evaluation of the function failed.
+  // - std::nullopt if the function returned a non-integer value.
+  // - the result of the function otherwise.
+  base::StatusOr<std::optional<int64_t>> Evaluate(Memoizer::MemoizedArgs args) {
+    RETURN_IF_ERROR(MaybeBindIntArgument(stmt_, prototype_.function_name,
+                                         prototype_.arguments[0], args));
+    base::StatusOr<SqlValue> result = EvaluateScalarStatement(
+        stmt_, engine_->sqlite_engine()->db(), prototype_);
+    sqlite3_reset(stmt_);
+    sqlite3_clear_bindings(stmt_);
+    RETURN_IF_ERROR(result.status());
+    if (result->type != SqlValue::Type::kLong) {
+      return std::optional<int64_t>(std::nullopt);
+    }
+    return std::optional<int64_t>(result->long_value);
+  }
+
+  PerfettoSqlEngine* engine_;
+  sqlite3_stmt* stmt_;
+  const Prototype& prototype_;
+  Memoizer& memoizer_;
+
+  // Current state of the evaluation.
+  enum class State {
+    kComputingFirstPass,
+    kComputingSecondPass,
+  };
+  State state_ = State::kComputingFirstPass;
+
+  // A state of evaluation of a given argument.
+  enum class ArgState {
+    kScheduled,
+    kEvaluating,
+    kEvaluated,
+  };
+
+  // See the class-level comment for the explanation of the two passes.
+  std::queue<Memoizer::MemoizedArgs> first_pass_;
+  base::FlatHashMap<Memoizer::MemoizedArgs, ArgState> visited_;
+  std::stack<Memoizer::MemoizedArgs> second_pass_;
+};
+
+// Stores the state of a CREATE_FUNCTION call. The state is kept across
+// multiple invocations of the function (e.g. when the function is called
+// recursively).
+class CreatedFunction::Context {
+ public:
+  explicit Context(PerfettoSqlEngine* engine) : engine_(engine) {}
+
+  // Prepare a statement and push it into the stack of allocated statements
+  // for this function.
+  base::Status PrepareStatement() {
+    base::StatusOr<ScopedStmt> stmt =
+        CreateStatement(engine_, sql_, prototype_str_);
+    RETURN_IF_ERROR(stmt.status());
+    is_valid_ = true;
+    stmts_.push_back(std::move(stmt.value()));
+    return base::OkStatus();
+  }
+
+  // Sets the state of the function. Should be called only when the function
+  // is invalid (i.e. when it is first created or when the previous statement
+  // failed to prepare).
+  void Reset(Prototype prototype,
+             std::string prototype_str,
+             sql_argument::Type return_type,
+             std::string sql) {
+    // Re-registration of valid functions is not allowed.
+    PERFETTO_DCHECK(!is_valid_);
+    PERFETTO_DCHECK(stmts_.empty());
+
+    prototype_ = std::move(prototype);
+    prototype_str_ = std::move(prototype_str);
+    return_type_ = return_type;
+    sql_ = std::move(sql);
+  }
+
+  // This function is called each time the SQL function is invoked.
+  // It ensures that we have a statement for the current recursion level,
+  // allocating a new one if needed.
+  base::Status PushStackEntry() {
+    ++current_recursion_level_;
+    if (current_recursion_level_ > stmts_.size()) {
+      return PrepareStatement();
+    }
+    return base::OkStatus();
+  }
+
+  // Returns the statement that is used for the current invocation.
+  sqlite3_stmt* CurrentStatement() {
+    return stmts_[current_recursion_level_ - 1].get();
+  }
+
+  // This function is called each time the function returns and resets the
+  // statement that this invocation used.
+  void PopStackEntry() {
+    if (current_recursion_level_ > stmts_.size()) {
+      // This is possible if we didn't prepare the statement and returned
+      // an error.
+      return;
+    }
+    sqlite3_reset(CurrentStatement());
+    sqlite3_clear_bindings(CurrentStatement());
+    --current_recursion_level_;
+  }
+
+  base::StatusOr<RecursiveCallUnroller::FunctionCallState> OnFunctionCall(
+      Memoizer::MemoizedArgs args) {
+    if (!recursive_call_unroller_) {
+      return RecursiveCallUnroller::FunctionCallState::kEvaluate;
+    }
+    return recursive_call_unroller_->OnFunctionCall(args);
+  }
+
+  // Called before checking the function for memoization.
+  base::Status UnrollRecursiveCallIfNeeded(Memoizer::MemoizedArgs args) {
+    if (!memoizer_.enabled() || !is_in_recursive_call() ||
+        recursive_call_unroller_) {
+      return base::OkStatus();
+    }
+    // If we are in a recursive call, we need to check if we have already
+    // computed the result for the current arguments.
+    if (memoizer_.HasMemoizedValue(args)) {
+      return base::OkStatus();
+    }
+
+    // If we are at the beginning of a function call that:
+    // - is recursive,
+    // - can be memoized,
+    // - hasn't been memoized already, and
+    // - hasn't started unrolling yet;
+    // start the unrolling and run the unrolling loop.
+    recursive_call_unroller_ = std::make_unique<RecursiveCallUnroller>(
+        engine_, CurrentStatement(), prototype_, memoizer_);
+    auto status = recursive_call_unroller_->Run(args);
+    recursive_call_unroller_.reset();
+    return status;
+  }
+
+  // Schedules a statement to be validated in VerifyPostConditions to check
+  // that it indeed doesn't return any more rows.
+  void ScheduleEmptyStatementValidation(sqlite3_stmt* stmt) {
+    empty_stmts_to_validate_.push_back(stmt);
+  }
+
+  base::Status ValidateEmptyStatements() {
+    while (!empty_stmts_to_validate_.empty()) {
+      sqlite3_stmt* stmt = empty_stmts_to_validate_.back();
+      empty_stmts_to_validate_.pop_back();
+      RETURN_IF_ERROR(
+          CheckNoMoreRows(stmt, engine_->sqlite_engine()->db(), prototype_));
+    }
+    return base::OkStatus();
+  }
+
+  bool is_in_recursive_call() const { return current_recursion_level_ > 1; }
+
+  base::Status EnableMemoization() {
+    return memoizer_.EnableMemoization(prototype_, return_type_);
+  }
+
+  PerfettoSqlEngine* engine() const { return engine_; }
+
+  const Prototype& prototype() const { return prototype_; }
+
+  sql_argument::Type return_type() const { return return_type_; }
+
+  const std::string& sql() const { return sql_; }
+
+  bool is_valid() const { return is_valid_; }
+
+  Memoizer& memoizer() { return memoizer_; }
+
+ private:
+  PerfettoSqlEngine* engine_;
+  Prototype prototype_;
+  std::string prototype_str_;
+  sql_argument::Type return_type_;
+  std::string sql_;
+  // Perfetto SQL functions support recursion. Given that each function call in
+  // the stack requires a dedicated statement, we maintain a stack of prepared
+  // statements and use the top one for each new call (allocating a new one if
+  // needed).
+  std::vector<ScopedStmt> stmts_;
+  // A list of statements to check in VerifyPostConditions to ensure that they
+  // don't return any more rows.
+  std::vector<sqlite3_stmt*> empty_stmts_to_validate_;
+  size_t current_recursion_level_ = 0;
+  // Function re-registration is not allowed, but the user is allowed to define
+  // the function again if the first attempt failed. The |is_valid_| flag
+  // supports this by tracking whether the current function definition is valid
+  // (in which case re-registration is not allowed).
+  bool is_valid_ = false;
+  Memoizer memoizer_;
+  // Set if we are in a middle of unrolling a recursive call.
+  std::unique_ptr<RecursiveCallUnroller> recursive_call_unroller_;
+};
+
 base::Status CreatedFunction::Run(CreatedFunction::Context* ctx,
                                   size_t argc,
                                   sqlite3_value** argv,
                                   SqlValue& out,
                                   Destructors&) {
-  if (argc != ctx->prototype.arguments.size()) {
+  if (argc != ctx->prototype().arguments.size()) {
     return base::ErrStatus(
         "%s: invalid number of args; expected %zu, received %zu",
-        ctx->prototype.function_name.c_str(), ctx->prototype.arguments.size(),
-        argc);
+        ctx->prototype().function_name.c_str(),
+        ctx->prototype().arguments.size(), argc);
   }
 
   // Type check all the arguments.
   for (size_t i = 0; i < argc; ++i) {
     sqlite3_value* arg = argv[i];
-    sql_argument::Type type = ctx->prototype.arguments[i].type();
+    sql_argument::Type type = ctx->prototype().arguments[i].type();
     base::Status status = sqlite_utils::TypeCheckSqliteValue(
         arg, sql_argument::TypeToSqlValueType(type),
         sql_argument::TypeToHumanFriendlyString(type));
     if (!status.ok()) {
       return base::ErrStatus("%s[arg=%s]: argument %zu %s",
-                             ctx->prototype.function_name.c_str(),
+                             ctx->prototype().function_name.c_str(),
                              sqlite3_value_text(arg), i, status.c_message());
     }
   }
 
+  // Enter the function and ensure that we have a statement allocated.
+  RETURN_IF_ERROR(ctx->PushStackEntry());
+
+  std::optional<Memoizer::MemoizedArgs> memoized_args =
+      Memoizer::AsMemoizedArgs(argc, argv);
+
+  if (memoized_args) {
+    // If we are in the middle of unrolling recursive calls, we might want to
+    // ignore the function invocation. See the comment in RecursiveCallUnroller
+    // for more details.
+    base::StatusOr<RecursiveCallUnroller::FunctionCallState> unroll_state =
+        ctx->OnFunctionCall(*memoized_args);
+    RETURN_IF_ERROR(unroll_state.status());
+    if (*unroll_state ==
+        RecursiveCallUnroller::FunctionCallState::kIgnoreDueToFirstPass) {
+      // Return NULL.
+      return base::OkStatus();
+    }
+
+    RETURN_IF_ERROR(ctx->UnrollRecursiveCallIfNeeded(*memoized_args));
+
+    std::optional<SqlValue> memoized_value =
+        ctx->memoizer().GetMemoizedValue(*memoized_args);
+    if (memoized_value) {
+      out = *memoized_value;
+      return base::OkStatus();
+    }
+  }
+
   PERFETTO_TP_TRACE(
-      metatrace::Category::FUNCTION, "CREATE_FUNCTION",
+      metatrace::Category::FUNCTION, "SQL_FUNCTION_CALL",
       [ctx, argv](metatrace::Record* r) {
-        r->AddArg("Function", ctx->prototype.function_name.c_str());
-        for (uint32_t i = 0; i < ctx->prototype.arguments.size(); ++i) {
+        r->AddArg("Function", ctx->prototype().function_name.c_str());
+        for (uint32_t i = 0; i < ctx->prototype().arguments.size(); ++i) {
           std::string key = "Arg " + std::to_string(i);
           const char* value =
               reinterpret_cast<const char*>(sqlite3_value_text(argv[i]));
@@ -88,62 +624,30 @@
         }
       });
 
-  // Bind all the arguments to the appropriate places in the function.
-  for (size_t i = 0; i < argc; ++i) {
-    RETURN_IF_ERROR(MaybeBindArgument(ctx->stmt.get(),
-                                      ctx->prototype.function_name,
-                                      ctx->prototype.arguments[i], argv[i]));
+  RETURN_IF_ERROR(
+      BindArguments(ctx->CurrentStatement(), ctx->prototype(), argc, argv));
+  auto result = EvaluateScalarStatement(ctx->CurrentStatement(),
+                                        ctx->engine()->sqlite_engine()->db(),
+                                        ctx->prototype());
+  RETURN_IF_ERROR(result.status());
+  out = result.value();
+  ctx->ScheduleEmptyStatementValidation(ctx->CurrentStatement());
+
+  if (memoized_args) {
+    ctx->memoizer().Memoize(*memoized_args, out);
   }
 
-  int ret = sqlite3_step(ctx->stmt.get());
-  RETURN_IF_ERROR(SqliteRetToStatus(ctx->engine->sqlite_engine()->db(),
-                                    ctx->prototype.function_name, ret));
-  if (ret == SQLITE_DONE) {
-    // No return value means we just return don't set |out|.
-    return base::OkStatus();
-  }
-
-  PERFETTO_DCHECK(ret == SQLITE_ROW);
-  size_t col_count = static_cast<size_t>(sqlite3_column_count(ctx->stmt.get()));
-  if (col_count != 1) {
-    return base::ErrStatus(
-        "%s: SQL definition should only return one column: returned %zu "
-        "columns",
-        ctx->prototype.function_name.c_str(), col_count);
-  }
-
-  out = sqlite_utils::SqliteValueToSqlValue(
-      sqlite3_column_value(ctx->stmt.get(), 0));
-
-  // If we return a bytes type but have a null pointer, SQLite will convert this
-  // to an SQL null. However, for proto build functions, we actively want to
-  // distinguish between nulls and 0 byte strings. Therefore, change the value
-  // to an empty string.
-  if (out.type == SqlValue::kBytes && out.bytes_value == nullptr) {
-    PERFETTO_DCHECK(out.bytes_count == 0);
-    out.bytes_value = "";
-  }
-  return base::OkStatus();
-}
-
-base::Status CreatedFunction::VerifyPostConditions(Context* ctx) {
-  int ret = sqlite3_step(ctx->stmt.get());
-  RETURN_IF_ERROR(SqliteRetToStatus(ctx->engine->sqlite_engine()->db(),
-                                    ctx->prototype.function_name, ret));
-  if (ret == SQLITE_ROW) {
-    auto expanded_sql = sqlite_utils::ExpandedSqlForStmt(ctx->stmt.get());
-    return base::ErrStatus(
-        "%s: multiple values were returned when executing function body. "
-        "Executed SQL was %s",
-        ctx->prototype.function_name.c_str(), expanded_sql.get());
-  }
-  PERFETTO_DCHECK(ret == SQLITE_DONE);
   return base::OkStatus();
 }
 
 void CreatedFunction::Cleanup(CreatedFunction::Context* ctx) {
-  sqlite3_reset(ctx->stmt.get());
-  sqlite3_clear_bindings(ctx->stmt.get());
+  // Clear the statement.
+  ctx->PopStackEntry();
+}
+
+base::Status CreatedFunction::VerifyPostConditions(
+    CreatedFunction::Context* ctx) {
+  return ctx->ValidateEmptyStatements();
 }
 
 }  // namespace
@@ -153,11 +657,7 @@
                                  sqlite3_value** argv,
                                  SqlValue&,
                                  Destructors&) {
-  if (argc != 3) {
-    return base::ErrStatus(
-        "CREATE_FUNCTION: invalid number of args; expected %u, received %zu",
-        3u, argc);
-  }
+  RETURN_IF_ERROR(sqlite_utils::CheckArgCount("CREATE_FUNCTION", argc, 3u));
 
   sqlite3_value* prototype_value = argv[0];
   sqlite3_value* return_type_value = argv[1];
@@ -211,66 +711,80 @@
         return_type_str.ToStdString().c_str());
   }
 
+  std::string function_name = prototype.function_name;
   int created_argc = static_cast<int>(prototype.arguments.size());
-  auto* fn_ctx = engine->sqlite_engine()->GetFunctionContext(
-      prototype.function_name, created_argc);
-  if (fn_ctx) {
+  auto* ctx = static_cast<CreatedFunction::Context*>(
+      engine->sqlite_engine()->GetFunctionContext(prototype.function_name,
+                                                  created_argc));
+  if (!ctx) {
+    // We register the function with SQLite before we prepare the statement so
+    // the statement can reference the function itself, enabling recursive
+    // calls.
+    std::unique_ptr<CreatedFunction::Context> created_fn_ctx =
+        std::make_unique<CreatedFunction::Context>(engine);
+    ctx = created_fn_ctx.get();
+    RETURN_IF_ERROR(engine->RegisterSqlFunction<CreatedFunction>(
+        function_name.c_str(), created_argc, std::move(created_fn_ctx)));
+  }
+  if (ctx->is_valid()) {
     // If the function already exists, just verify that the prototype, return
     // type and SQL matches exactly with what we already had registered. By
     // doing this, we can avoid the problem plaguing C++ macros where macro
     // ordering determines which one gets run.
-    auto* created_ctx = static_cast<CreatedFunction::Context*>(fn_ctx);
-    if (created_ctx->prototype != prototype) {
+    if (ctx->prototype() != prototype) {
       return base::ErrStatus(
           "CREATE_FUNCTION[prototype=%s]: function prototype changed",
           prototype_str.ToStdString().c_str());
     }
 
-    if (created_ctx->return_type != *opt_return_type) {
+    if (ctx->return_type() != *opt_return_type) {
       return base::ErrStatus(
           "CREATE_FUNCTION[prototype=%s]: return type changed from %s to %s",
           prototype_str.ToStdString().c_str(),
-          sql_argument::TypeToHumanFriendlyString(created_ctx->return_type),
+          sql_argument::TypeToHumanFriendlyString(ctx->return_type()),
           return_type_str.ToStdString().c_str());
     }
 
-    if (created_ctx->sql != sql_defn_str) {
+    if (ctx->sql() != sql_defn_str) {
       return base::ErrStatus(
           "CREATE_FUNCTION[prototype=%s]: function SQL changed from %s to %s",
-          prototype_str.ToStdString().c_str(), created_ctx->sql.c_str(),
+          prototype_str.ToStdString().c_str(), ctx->sql().c_str(),
           sql_defn_str.c_str());
     }
+
     return base::OkStatus();
   }
 
-  // Prepare the SQL definition as a statement using SQLite.
-  ScopedStmt stmt;
-  sqlite3_stmt* stmt_raw = nullptr;
-  int ret = sqlite3_prepare_v2(
-      engine->sqlite_engine()->db(), sql_defn_str.data(),
-      static_cast<int>(sql_defn_str.size()), &stmt_raw, nullptr);
-  if (ret != SQLITE_OK) {
+  ctx->Reset(std::move(prototype), prototype_str.ToStdString(),
+             *opt_return_type, std::move(sql_defn_str));
+
+  // Ideally, we would unregister the function here if the statement prep
+  // failed, but SQLite doesn't allow unregistering functions inside active
+  // statements. So instead we'll just try to prepare the statement when calling
+  // this function, which will return an error.
+  return ctx->PrepareStatement();
+}
+
+base::Status ExperimentalMemoize::Run(PerfettoSqlEngine* engine,
+                                      size_t argc,
+                                      sqlite3_value** argv,
+                                      SqlValue&,
+                                      Destructors&) {
+  RETURN_IF_ERROR(sqlite_utils::CheckArgCount("EXPERIMENTAL_MEMOIZE", argc, 1));
+  base::StatusOr<std::string> function_name =
+      sqlite_utils::ExtractStringArg("MEMOIZE", "function_name", argv[0]);
+  RETURN_IF_ERROR(function_name.status());
+
+  constexpr size_t kSupportedArgCount = 1;
+  CreatedFunction::Context* ctx = static_cast<CreatedFunction::Context*>(
+      engine->sqlite_engine()->GetFunctionContext(function_name->c_str(),
+                                                  kSupportedArgCount));
+  if (!ctx) {
     return base::ErrStatus(
-        "CREATE_FUNCTION[prototype=%s]: SQLite error when preparing "
-        "statement %s",
-        prototype_str.ToStdString().c_str(),
-        sqlite_utils::FormatErrorMessage(stmt_raw,
-                                         base::StringView(sql_defn_str),
-                                         engine->sqlite_engine()->db(), ret)
-            .c_message());
+        "EXPERIMENTAL_MEMOIZE: Function %s(INT) does not exist",
+        function_name->c_str());
   }
-  stmt.reset(stmt_raw);
-
-  std::string function_name = prototype.function_name;
-  std::unique_ptr<CreatedFunction::Context> created_fn_ctx(
-      new CreatedFunction::Context{engine, std::move(prototype),
-                                   *opt_return_type, std::move(sql_defn_str),
-                                   std::move(stmt)});
-  RETURN_IF_ERROR(engine->RegisterSqlFunction<CreatedFunction>(
-      function_name.c_str(), created_argc, std::move(created_fn_ctx)));
-
-  // CREATE_FUNCTION doesn't have a return value so just don't sent |out|.
-  return base::OkStatus();
+  return ctx->EnableMemoization();
 }
 
 }  // namespace trace_processor
diff --git a/src/trace_processor/prelude/functions/create_function.h b/src/trace_processor/prelude/functions/create_function.h
index ed8c06c..612abf1 100644
--- a/src/trace_processor/prelude/functions/create_function.h
+++ b/src/trace_processor/prelude/functions/create_function.h
@@ -44,6 +44,23 @@
                           Destructors&);
 };
 
+// Implementation of MEMOIZE SQL function.
+// SELECT EXPERIMENTAL_MEMOIZE('my_func') enables memoization for the results of
+// the calls to `my_func`. `my_func` must be a Perfetto SQL function created
+// through CREATE_FUNCTION that takes a single integer argument and returns an
+// int.
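+//
+// Illustrative usage (assumes a function `f` previously created via
+// CREATE_FUNCTION with a single INT argument and an INT return type):
+//   SELECT CREATE_FUNCTION('f(x INT)', 'INT', 'SELECT $x + 1');
+//   SELECT EXPERIMENTAL_MEMOIZE('f');
+//   SELECT f(42);  -- repeated calls with the same argument reuse the result.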
+struct ExperimentalMemoize : public SqlFunction {
+  using Context = PerfettoSqlEngine;
+
+  static constexpr bool kVoidReturn = true;
+
+  static base::Status Run(Context* ctx,
+                          size_t argc,
+                          sqlite3_value** argv,
+                          SqlValue& out,
+                          Destructors&);
+};
+
 }  // namespace trace_processor
 }  // namespace perfetto
 
diff --git a/src/trace_processor/prelude/functions/create_function_internal.cc b/src/trace_processor/prelude/functions/create_function_internal.cc
index 730d89a..1e27bed 100644
--- a/src/trace_processor/prelude/functions/create_function_internal.cc
+++ b/src/trace_processor/prelude/functions/create_function_internal.cc
@@ -92,5 +92,26 @@
   return base::OkStatus();
 }
 
+base::Status MaybeBindIntArgument(sqlite3_stmt* stmt,
+                                  const std::string& function_name,
+                                  const sql_argument::ArgumentDefinition& arg,
+                                  int64_t value) {
+  int index = sqlite3_bind_parameter_index(stmt, arg.dollar_name().c_str());
+
+  // If the argument is not in the query, this just means it's an unused
+  // argument which we can ignore.
+  if (index == 0)
+    return base::Status();
+
+  int ret = sqlite3_bind_int64(stmt, index, value);
+  if (ret != SQLITE_OK) {
+    return base::ErrStatus(
+        "%s: SQLite error while binding value to argument %s: %s",
+        function_name.c_str(), arg.name().c_str(),
+        sqlite3_errmsg(sqlite3_db_handle(stmt)));
+  }
+  return base::OkStatus();
+}
+
 }  // namespace trace_processor
 }  // namespace perfetto
diff --git a/src/trace_processor/prelude/functions/create_function_internal.h b/src/trace_processor/prelude/functions/create_function_internal.h
index 1f77459..f1b44ed 100644
--- a/src/trace_processor/prelude/functions/create_function_internal.h
+++ b/src/trace_processor/prelude/functions/create_function_internal.h
@@ -53,6 +53,11 @@
                                const sql_argument::ArgumentDefinition&,
                                sqlite3_value*);
 
+base::Status MaybeBindIntArgument(sqlite3_stmt*,
+                                  const std::string& function_name,
+                                  const sql_argument::ArgumentDefinition&,
+                                  int64_t);
+
 }  // namespace trace_processor
 }  // namespace perfetto
 
diff --git a/src/trace_processor/sqlite/sqlite_utils.cc b/src/trace_processor/sqlite/sqlite_utils.cc
index c4077a5..3afb3ea 100644
--- a/src/trace_processor/sqlite/sqlite_utils.cc
+++ b/src/trace_processor/sqlite/sqlite_utils.cc
@@ -175,6 +175,61 @@
   PERFETTO_FATAL("For GCC");
 }
 
+base::Status CheckArgCount(const char* function_name,
+                           size_t argc,
+                           size_t expected_argc) {
+  if (argc == expected_argc) {
+    return base::OkStatus();
+  }
+  return base::ErrStatus("%s: expected %zu arguments, got %zu", function_name,
+                         expected_argc, argc);
+}
+
+base::StatusOr<int64_t> ExtractIntArg(const char* function_name,
+                                      const char* arg_name,
+                                      sqlite3_value* sql_value) {
+  SqlValue value = SqliteValueToSqlValue(sql_value);
+  std::optional<int64_t> result;
+
+  base::Status status = ExtractFromSqlValue(value, result);
+  if (!status.ok()) {
+    return base::ErrStatus("%s(%s): %s", function_name, arg_name,
+                           status.message().c_str());
+  }
+  PERFETTO_CHECK(result);
+  return *result;
+}
+
+base::StatusOr<double> ExtractDoubleArg(const char* function_name,
+                                        const char* arg_name,
+                                        sqlite3_value* sql_value) {
+  SqlValue value = SqliteValueToSqlValue(sql_value);
+  std::optional<double> result;
+
+  base::Status status = ExtractFromSqlValue(value, result);
+  if (!status.ok()) {
+    return base::ErrStatus("%s(%s): %s", function_name, arg_name,
+                           status.message().c_str());
+  }
+  PERFETTO_CHECK(result);
+  return *result;
+}
+
+base::StatusOr<std::string> ExtractStringArg(const char* function_name,
+                                             const char* arg_name,
+                                             sqlite3_value* sql_value) {
+  SqlValue value = SqliteValueToSqlValue(sql_value);
+  std::optional<const char*> result;
+
+  base::Status status = ExtractFromSqlValue(value, result);
+  if (!status.ok()) {
+    return base::ErrStatus("%s(%s): %s", function_name, arg_name,
+                           status.message().c_str());
+  }
+  PERFETTO_CHECK(result);
+  return std::string(*result);
+}
+
 base::Status TypeCheckSqliteValue(sqlite3_value* value,
                                   SqlValue::Type expected_type) {
   return TypeCheckSqliteValue(value, expected_type,
diff --git a/src/trace_processor/sqlite/sqlite_utils.h b/src/trace_processor/sqlite/sqlite_utils.h
index ef44c45..d3ccfc5 100644
--- a/src/trace_processor/sqlite/sqlite_utils.h
+++ b/src/trace_processor/sqlite/sqlite_utils.h
@@ -256,6 +256,24 @@
 // This should really only be used for debugging messages.
 const char* SqliteTypeToFriendlyString(SqlValue::Type type);
 
+// Verifies if |argc| matches |expected_argc| and returns an appropriate error
+// message if they don't match.
+base::Status CheckArgCount(const char* function_name,
+                           size_t argc,
+                           size_t expected_argc);
+
+// Type-safe helpers to extract an arg value from a sqlite3_value*, returning an
+// appropriate error message if the extraction fails.
+base::StatusOr<int64_t> ExtractIntArg(const char* function_name,
+                                      const char* arg_name,
+                                      sqlite3_value* value);
+base::StatusOr<double> ExtractDoubleArg(const char* function_name,
+                                        const char* arg_name,
+                                        sqlite3_value* value);
+base::StatusOr<std::string> ExtractStringArg(const char* function_name,
+                                             const char* arg_name,
+                                             sqlite3_value* value);
+
 // Verifies if |value| has the type represented by |expected_type|.
 // Returns base::OkStatus if it does or a base::ErrStatus with an
 // appropriate error message (incorporating |expected_type_str| if specified).
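
For context, a minimal usage sketch of the helpers declared above (the function name, argument names and caller shape are illustrative assumptions, namespace qualifiers are omitted, and base::StatusOr is assumed to expose ok()/status()/operator*):

  base::Status RunMyFn(size_t argc, sqlite3_value** argv) {
    base::Status status = CheckArgCount("MY_FN", argc, 2);
    if (!status.ok())
      return status;
    base::StatusOr<int64_t> ts = ExtractIntArg("MY_FN", "ts", argv[0]);
    if (!ts.ok())
      return ts.status();
    base::StatusOr<std::string> unit = ExtractStringArg("MY_FN", "unit", argv[1]);
    if (!unit.ok())
      return unit.status();
    // Compute the result from *ts and *unit here.
    return base::OkStatus();
  }
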
diff --git a/src/trace_processor/stdlib/chrome/chrome_scrolls.sql b/src/trace_processor/stdlib/chrome/chrome_scrolls.sql
index 1ad7f31..31522c1 100644
--- a/src/trace_processor/stdlib/chrome/chrome_scrolls.sql
+++ b/src/trace_processor/stdlib/chrome/chrome_scrolls.sql
@@ -28,7 +28,7 @@
 -- WebView instances. Currently gesture_scroll_id is unique within an instance, but
 -- is not unique across multiple instances. Switching to an EventLatency based
 -- definition of scrolls should resolve this.
-CREATE VIEW chrome_scrolls AS
+CREATE TABLE chrome_scrolls AS
 WITH all_scrolls AS (
   SELECT
     name,
diff --git a/src/trace_processor/trace_processor_impl.cc b/src/trace_processor/trace_processor_impl.cc
index 800d6d6..9b4c09b 100644
--- a/src/trace_processor/trace_processor_impl.cc
+++ b/src/trace_processor/trace_processor_impl.cc
@@ -384,6 +384,18 @@
     context_.content_analyzer.reset(new ProtoContentAnalyzer(&context_));
   }
 
+  auto v2 = context_.config.dev_flags.find("enable_db2_filtering");
+  if (v2 != context_.config.dev_flags.end()) {
+    if (v2->second == "true") {
+      Table::kUseFilterV2 = true;
+    } else if (v2->second == "false") {
+      Table::kUseFilterV2 = false;
+    } else {
+      PERFETTO_ELOG("Unknown value for enable_db2_filtering %s",
+                    v2->second.c_str());
+    }
+  }
+
   sqlite3_str_split_init(engine_.sqlite_engine()->db());
   RegisterAdditionalModules(&context_);
 
@@ -405,6 +417,8 @@
   RegisterFunction<ToMonotonic>(&engine_, "TO_MONOTONIC", 1,
                                 context_.clock_converter.get());
   RegisterFunction<CreateFunction>(&engine_, "CREATE_FUNCTION", 3, &engine_);
+  RegisterFunction<ExperimentalMemoize>(&engine_, "EXPERIMENTAL_MEMOIZE", 1,
+                                        &engine_);
   RegisterFunction<CreateViewFunction>(
       &engine_, "CREATE_VIEW_FUNCTION", 3,
       std::unique_ptr<CreateViewFunction::Context>(
diff --git a/src/trace_processor/trace_processor_shell.cc b/src/trace_processor/trace_processor_shell.cc
index 44df9a9..5d86b7c 100644
--- a/src/trace_processor/trace_processor_shell.cc
+++ b/src/trace_processor/trace_processor_shell.cc
@@ -709,12 +709,13 @@
   bool no_ftrace_raw = false;
   bool analyze_trace_proto_content = false;
   bool crop_track_events = false;
+  std::vector<std::string> dev_flags;
 };
 
 void PrintUsage(char** argv) {
   PERFETTO_ELOG(R"(
 Interactive trace processor shell.
-Usage: %s [OPTIONS] trace_file.pb
+Usage: %s [FLAGS] trace_file.pb
 
 Options:
  -h, --help                           Prints this guide.
@@ -739,12 +740,8 @@
  -e, --export FILE                    Export the contents of trace processor
                                       into an SQLite database after running any
                                       metrics or queries specified.
- -m, --metatrace FILE                 Enables metatracing of trace processor
-                                      writing the resulting trace into FILE.
- --metatrace-buffer-capacity N        Sets metatrace event buffer to capture
-                                      last N events.
- --metatrace-categories CATEGORIES    A comma-separated list of metatrace
-                                      categories to enable.
+
+Feature flags:
  --full-sort                          Forces the trace processor into performing
                                       a full sort ignoring any windowing
                                       logic.
@@ -762,6 +759,9 @@
                                       *should not* be enabled on production
                                       builds. The features behind this flag can
                                       break at any time without any warning.
+ --dev-flag KEY=VALUE                 Set a development flag to the given value.
+                                      Does not have any effect unless --dev is
+                                      specified.
 
 Standard library:
  --add-sql-module MODULE_PATH         Files from the directory will be treated
@@ -794,7 +794,15 @@
                                       Loads metric proto and sql files from
                                       DISK_PATH/protos and DISK_PATH/sql
                                       respectively, and mounts them onto
-                                      VIRTUAL_PATH.)",
+                                      VIRTUAL_PATH.
+
+Metatracing:
+ -m, --metatrace FILE                 Enables metatracing of trace processor
+                                      writing the resulting trace into FILE.
+ --metatrace-buffer-capacity N        Sets metatrace event buffer to capture
+                                      last N events.
+ --metatrace-categories CATEGORIES    A comma-separated list of metatrace
+                                      categories to enable.)",
                 argv[0]);
 }
 
@@ -816,6 +824,7 @@
     OPT_METATRACE_CATEGORIES,
     OPT_ANALYZE_TRACE_PROTO_CONTENT,
     OPT_CROP_TRACK_EVENTS,
+    OPT_DEV_FLAG,
   };
 
   static const option long_options[] = {
@@ -848,6 +857,7 @@
       {"pre-metrics", required_argument, nullptr, OPT_PRE_METRICS},
       {"metrics-output", required_argument, nullptr, OPT_METRICS_OUTPUT},
       {"metric-extension", required_argument, nullptr, OPT_METRIC_EXTENSION},
+      {"dev-flag", required_argument, nullptr, OPT_DEV_FLAG},
       {nullptr, 0, nullptr, 0}};
 
   bool explicit_interactive = false;
@@ -986,6 +996,11 @@
       continue;
     }
 
+    if (option == OPT_DEV_FLAG) {
+      command_line_options.dev_flags.push_back(optarg);
+      continue;
+    }
+
     PrintUsage(argv);
     exit(option == 'h' ? 0 : 1);
   }
@@ -1573,6 +1588,14 @@
 
   if (options.dev) {
     config.enable_dev_features = true;
+    for (const auto& flag_pair : options.dev_flags) {
+      auto kv = base::SplitString(flag_pair, "=");
+      if (kv.size() != 2) {
+        PERFETTO_ELOG("Ignoring unknown dev flag format %s", flag_pair.c_str());
+        continue;
+      }
+      config.dev_flags.emplace(kv[0], kv[1]);
+    }
   }
 
   std::unique_ptr<TraceProcessor> tp = TraceProcessor::CreateInstance(config);
@@ -1634,7 +1657,6 @@
     RETURN_IF_ERROR(RunQueries(options.pre_metrics_path, false));
   }
 
-
   std::vector<MetricNameAndPath> metrics;
   if (!options.metric_names.empty()) {
     RETURN_IF_ERROR(LoadMetrics(options.metric_names, pool, metrics));
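
End to end, passing --dev --dev-flag enable_db2_filtering=true to the shell lands the key/value pair in config.dev_flags, which trace_processor_impl.cc (above) reads to toggle Table::kUseFilterV2. A minimal sketch of the equivalent embedder-side setup, assuming the config struct is perfetto::trace_processor::Config (illustrative only, not part of this patch):

  perfetto::trace_processor::Config config;
  config.enable_dev_features = true;  // dev_flags are ignored without this.
  config.dev_flags.emplace("enable_db2_filtering", "true");
  std::unique_ptr<perfetto::trace_processor::TraceProcessor> tp =
      perfetto::trace_processor::TraceProcessor::CreateInstance(config);
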
diff --git a/test/data/ui-screenshots/ui-chrome_rendering_desktop_select_slice_with_flows.png.sha256 b/test/data/ui-screenshots/ui-chrome_rendering_desktop_select_slice_with_flows.png.sha256
index 0332663..02705ed 100644
--- a/test/data/ui-screenshots/ui-chrome_rendering_desktop_select_slice_with_flows.png.sha256
+++ b/test/data/ui-screenshots/ui-chrome_rendering_desktop_select_slice_with_flows.png.sha256
@@ -1 +1 @@
-b841b628cd3908f434d28ad0234656ceaeb4a418659200e2670343d2f885fa68
\ No newline at end of file
+c29b529b5bbd318e0df531f9118a78c8cce94004cc77a887cd394b3093d135c0
\ No newline at end of file
diff --git a/test/trace_processor/diff_tests/android/android_network_activity.out b/test/trace_processor/diff_tests/android/android_network_activity.out
index 14418e4..eca9f86 100644
--- a/test/trace_processor/diff_tests/android/android_network_activity.out
+++ b/test/trace_processor/diff_tests/android/android_network_activity.out
@@ -1,4 +1,4 @@
 "package_name","ts","dur","packet_count","packet_length"
 "uid=123",1000,1010,2,100
 "uid=123",3000,2500,4,200
-"uid=456",1005,1010,2,300
+"uid=456",1005,2110,3,350
diff --git a/test/trace_processor/diff_tests/android/tests.py b/test/trace_processor/diff_tests/android/tests.py
index 2dc6615..7d683cf 100644
--- a/test/trace_processor/diff_tests/android/tests.py
+++ b/test/trace_processor/diff_tests/android/tests.py
@@ -199,7 +199,8 @@
   def test_android_network_activity(self):
     # The following should have three activity regions:
     # * uid=123 from 1000 to 2010 (note: end is max(ts)+idle_ns)
-    # * uid=456 from 1005 to 2015 (note: doesn't group with above due to name)
+    # * uid=456 from 1005 to 3115 (note: doesn't group with above due to name)
+    #   * Also tests that groups form based on (ts+dur), not just start ts.
     # * uid=123 from 3000 to 5500 (note: gap between 1010 to 3000 > idle_ns)
     # Note: packet_timestamps are delta encoded from the base timestamp.
     return DiffTestBlueprint(
@@ -223,15 +224,29 @@
           }
         }
         packet {
-          timestamp: 0
+          timestamp: 1005
           network_packet_bundle {
             ctx {
               direction: DIR_EGRESS
               interface: "wlan"
               uid: 456
             }
-            packet_timestamps: [1005, 1015]
-            packet_lengths: [100, 200]
+            total_duration: 100
+            total_packets: 2
+            total_length: 300
+          }
+        }
+        packet {
+          timestamp: 2015
+          network_packet_bundle {
+            ctx {
+              direction: DIR_EGRESS
+              interface: "wlan"
+              uid: 456
+            }
+            total_duration: 100
+            total_packets: 1
+            total_length: 50
           }
         }
         packet {
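
Worked numbers for the updated expectation: assuming the test's idle_ns is 1000 (consistent with the uid=123 group ending at 2010), the uid=456 group now spans from the first bundle at ts=1005 to the last bundle's end plus idle, i.e. 2015 + 100 + 1000 = 3115. That gives dur = 3115 - 1005 = 2110, packet_count = 2 + 1 = 3 and packet_length = 300 + 50 = 350, matching the new android_network_activity.out row.
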
diff --git a/test/trace_processor/diff_tests/functions/tests.py b/test/trace_processor/diff_tests/functions/tests.py
index e5dc15c..c5f7918 100644
--- a/test/trace_processor/diff_tests/functions/tests.py
+++ b/test/trace_processor/diff_tests/functions/tests.py
@@ -39,6 +39,203 @@
 
 
 class Functions(TestSuite):
+
+  def test_create_function(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        SELECT create_function('f(x INT)', 'INT', 'SELECT $x + 1');
+
+        SELECT f(5) as result;
+      """,
+        out=Csv("""
+        "result"
+        6
+      """))
+
+  def test_create_function_returns_string(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        SELECT create_function('f(x INT)', 'STRING', 'SELECT "value_" || $x');
+
+        SELECT f(5) as result;
+      """,
+        out=Csv("""
+        "result"
+        "value_5"
+      """))
+
+  def test_create_function_duplicated(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        SELECT create_function('f()', 'INT', 'SELECT 1');
+        SELECT create_function('f()', 'INT', 'SELECT 1');
+
+        SELECT f() as result;
+      """,
+        out=Csv("""
+        "result"
+        1
+      """))
+
+  def test_create_function_recursive(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        -- Compute factorial.
+        SELECT create_function('f(x INT)', 'INT',
+        '
+          SELECT IIF($x = 0, 1, $x * f($x - 1))
+        ');
+
+        SELECT f(5) as result;
+      """,
+        out=Csv("""
+        "result"
+        120
+      """))
+
+  def test_create_function_recursive_string(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        -- Compute factorial.
+        SELECT create_function('f(x INT)', 'STRING',
+        '
+          SELECT IIF(
+            $x = 0,
+            "",
+            -- 97 is the ASCII code for "a".
+            f($x - 1) || char(96 + $x) || f($x - 1))
+        ');
+
+        SELECT f(4) as result;
+      """,
+        out=Csv("""
+          "result"
+          "abacabadabacaba"
+      """))
+
+  def test_create_function_memoize(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        -- Compute 2^n inefficiently to test memoization.
+        -- If it times out, memoization is not working.
+        SELECT create_function('f(x INT)', 'INT',
+        '
+          SELECT IIF($x = 0, 1, f($x - 1) + f($x - 1))
+        ');
+
+        SELECT EXPERIMENTAL_MEMOIZE('f');
+
+        -- 2^50 is too expensive to compute, but memoization makes it fast.
+        SELECT f(50) as result;
+      """,
+        out=Csv("""
+        "result"
+        1125899906842624
+      """))
+
+  def test_create_function_memoize_intermittent_memoization(self):
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        -- This function returns NULL for odd numbers and 1 for even numbers.
+        -- As we do not memoize NULL results, we would only memoize the results
+        -- for even numbers.
+        SELECT create_function('f(x INT)', 'INT',
+        '
+          SELECT IIF($x = 0, 1,
+            IIF(f($x - 1) IS NULL, 1, NULL)
+          )
+        ');
+
+        SELECT EXPERIMENTAL_MEMOIZE('f');
+
+        SELECT
+          f(50) as f_50,
+          f(51) as f_51;
+      """,
+        out=Csv("""
+        "f_50","f_51"
+        1,"[NULL]"
+      """))
+
+  def test_create_function_memoize_subtree_size(self):
+    # Tree:
+    #            1
+    #           / \
+    #          /   \
+    #         /     \
+    #        2       3
+    #       / \     / \
+    #      4   5   6   7
+    #     / \  |   |  | \
+    #    8   9 10 11 12 13
+    #    |   |
+    #   14   15
+    return DiffTestBlueprint(
+        trace=TextProto(""),
+        query="""
+        CREATE TABLE tree AS
+        WITH data(id, parent_id) as (VALUES
+          (1, NULL),
+          (2, 1),
+          (3, 1),
+          (4, 2),
+          (5, 2),
+          (6, 3),
+          (7, 3),
+          (8, 4),
+          (9, 4),
+          (10, 5),
+          (11, 6),
+          (12, 7),
+          (13, 7),
+          (14, 8),
+          (15, 9)
+        )
+        SELECT * FROM data;
+
+        SELECT create_function('subtree_size(id INT)', 'INT',
+        '
+          SELECT 1 + IFNULL((
+            SELECT
+              SUM(subtree_size(child.id))
+            FROM tree child
+            WHERE child.parent_id = $id
+          ), 0)
+        ');
+
+        SELECT EXPERIMENTAL_MEMOIZE('subtree_size');
+
+        SELECT
+          id, subtree_size(id) as size
+        FROM tree
+        ORDER BY id;
+      """,
+        out=Csv("""
+        "id","size"
+        1,15
+        2,8
+        3,6
+        4,5
+        5,2
+        6,2
+        7,3
+        8,2
+        9,2
+        10,1
+        11,1
+        12,1
+        13,1
+        14,1
+        15,1
+      """))
+
   def test_first_non_null_frame(self):
     return DiffTestBlueprint(
         trace=TextProto(r"""
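
The EXPERIMENTAL_MEMOIZE tests above rely on caching results by argument: without it, f(x) = f(x - 1) + f(x - 1) needs on the order of 2^x evaluations, with it only on the order of x. A conceptual C++ sketch of that effect (illustrative only, not the trace processor implementation):

  #include <cstdint>
  #include <unordered_map>

  // Computes 2^x via the doubly-recursive definition; the cache turns the
  // second recursive call into a lookup, so f(50) needs ~50 evaluations.
  int64_t f(int64_t x, std::unordered_map<int64_t, int64_t>& cache) {
    if (x == 0)
      return 1;
    auto it = cache.find(x);
    if (it != cache.end())
      return it->second;
    int64_t result = f(x - 1, cache) + f(x - 1, cache);
    cache.emplace(x, result);
    return result;
  }
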
diff --git a/tools/record_android_trace b/tools/record_android_trace
index a9db452..6d6a46c 100755
--- a/tools/record_android_trace
+++ b/tools/record_android_trace
@@ -351,12 +351,17 @@
 class HttpHandler(http.server.SimpleHTTPRequestHandler):
 
   def end_headers(self):
-    self.send_header('Access-Control-Allow-Origin', '*')
-    return super().end_headers()
+    self.send_header('Access-Control-Allow-Origin', self.server.allow_origin)
+    self.send_header('Cache-Control', 'no-cache')
+    super().end_headers()
 
   def do_GET(self):
-    self.server.last_request = self.path
-    return super().do_GET()
+    if self.path != '/' + self.server.expected_fname:
+      self.send_error(404, "File not found")
+      return
+
+    self.server.fname_get_completed = True
+    super().do_GET()
 
   def do_POST(self):
     self.send_error(404, "File not found")
@@ -382,9 +387,15 @@
   help = 'Output file or directory (default: %s)' % default_out_dir_str
   parser.add_argument('-o', '--out', default=default_out_dir, help=help)
 
-  help = 'Don\'t open in the browser'
+  help = 'Don\'t open or serve the trace'
   parser.add_argument('-n', '--no-open', action='store_true', help=help)
 
+  help = 'Don\'t open in browser, but still serve trace (good for remote use)'
+  parser.add_argument('--no-open-browser', action='store_true', help=help)
+
+  help = 'The web address used to open trace files'
+  parser.add_argument('--origin', default='https://ui.perfetto.dev', help=help)
+
   help = 'Force the use of the sideloaded binaries rather than system daemons'
   parser.add_argument('--sideload', action='store_true', help=help)
 
@@ -435,6 +446,9 @@
   help = 'Can be generated with https://ui.perfetto.dev/#!/record'
   grp.add_argument('-c', '--config', default=None, help=help)
 
+  help = 'Parse input from --config as binary proto (default: parse as text)'
+  grp.add_argument('--bin', action='store_true', help=help)
+
   args = parser.parse_args()
   args.sideload = args.sideload or args.sideload_path is not None
 
@@ -507,7 +521,9 @@
   fname = '%s-%s.pftrace' % (tstamp, os.urandom(3).hex())
   device_file = device_dir + fname
 
-  cmd = [perfetto_cmd, '--background', '--txt', '-o', device_file]
+  cmd = [perfetto_cmd, '--background', '-o', device_file]
+  if not args.bin:
+    cmd.append('--txt')
   on_device_config = None
   on_host_config = None
   if args.config is not None:
@@ -627,7 +643,8 @@
   if not args.no_open:
     prt('\n')
     prt('Opening the trace (%s) in the browser' % host_file)
-    open_trace_in_browser(host_file)
+    open_browser = not args.no_open_browser
+    open_trace_in_browser(host_file, open_browser, args.origin)
 
 
 def prt(msg, colors=ANSI.END):
@@ -655,17 +672,24 @@
     sys.exit(1)
 
 
-def open_trace_in_browser(path):
+def open_trace_in_browser(path, open_browser, origin):
   # We reuse the HTTP+RPC port because it's the only one allowed by the CSP.
   PORT = 9001
+  path = os.path.abspath(path)
   os.chdir(os.path.dirname(path))
   fname = os.path.basename(path)
   socketserver.TCPServer.allow_reuse_address = True
   with socketserver.TCPServer(('127.0.0.1', PORT), HttpHandler) as httpd:
-    webbrowser.open_new_tab(
-        'https://ui.perfetto.dev/#!/?url=http://127.0.0.1:%d/%s' %
-        (PORT, fname))
-    while httpd.__dict__.get('last_request') != '/' + fname:
+    address = f'{origin}/#!/?url=http://127.0.0.1:{PORT}/{fname}'
+    if open_browser:
+      webbrowser.open_new_tab(address)
+    else:
+      print(f'Open URL in browser: {address}')
+
+    httpd.expected_fname = fname
+    httpd.fname_get_completed = None
+    httpd.allow_origin = origin
+    while httpd.fname_get_completed is None:
       httpd.handle_request()
 
 
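
An example invocation using only the options added or touched above (illustrative; the config and output file names are placeholders): record with a binary-proto config and serve the trace without auto-opening a browser tab via

  tools/record_android_trace --bin -c config.bin -o trace.pftrace --no-open-browser

The script then prints the URL (rooted at --origin, https://ui.perfetto.dev by default) so the trace can be opened manually, which is the intended remote-use workflow.
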
diff --git a/ui/release/channels.json b/ui/release/channels.json
index 09f14f4..0dcf5cb 100644
--- a/ui/release/channels.json
+++ b/ui/release/channels.json
@@ -6,7 +6,7 @@
     },
     {
       "name": "canary",
-      "rev": "a34bc46479e2e659532f7e208c6eba5462c26bae"
+      "rev": "5f456dbc00731d4dff20ae4e395cf06f96374a2d"
     },
     {
       "name": "autopush",
diff --git a/ui/src/assets/details.scss b/ui/src/assets/details.scss
index 50e0d20..9a1fd7f 100644
--- a/ui/src/assets/details.scss
+++ b/ui/src/assets/details.scss
@@ -253,42 +253,41 @@
   .auto-layout {
     table-layout: auto;
   }
+}
 
-  .slice-details-latency-panel {
-    // This panel is set to relative to make this panel a positioned element
-    // This is to allow the absolute text panels below to be positioned relative
-    // to this panel and not our parent.
-    position: relative;
-    font-size: 13px;
-    user-select: text;
+.slice-details-latency-panel {
+  // This panel is set to relative to make this panel a positioned element
+  // This is to allow the absolute text panels below to be positioned relative
+  // to this panel and not our parent.
+  position: relative;
+  user-select: text;
 
-    .text-detail {
-      font-size: 10px;
-    }
+  .text-detail {
+    font-size: smaller;
+  }
 
-    .slice-details-wakeup-text {
-      position: absolute;
-      left: 40px;
-      top: 20px;
-    }
+  .slice-details-wakeup-text {
+    position: absolute;
+    left: 40px;
+    top: 20px;
+  }
 
-    .slice-details-latency-text {
-      position: absolute;
-      left: 106px;
-      top: 90px;
-    }
+  .slice-details-latency-text {
+    position: absolute;
+    left: 106px;
+    top: 90px;
+  }
 
-    .slice-details-image {
-      user-select: none;
-      width: 360px;
-      height: 300px;
-    }
+  .slice-details-image {
+    user-select: none;
+    width: 180px;
+    height: 150px;
   }
 }
 
 .details-table-multicolumn {
   display: flex;
-  user-select: 'text';
+  user-select: "text";
 }
 
 .flow-link:hover {
diff --git a/ui/src/assets/perfetto.scss b/ui/src/assets/perfetto.scss
index e4da147..8e5afe6 100644
--- a/ui/src/assets/perfetto.scss
+++ b/ui/src/assets/perfetto.scss
@@ -39,3 +39,6 @@
 @import "widgets/tree";
 @import "widgets/switch";
 @import "widgets/form";
+@import "widgets/details_shell";
+@import "widgets/grid_layout";
+@import "widgets/section";
diff --git a/ui/src/assets/scheduling_latency.png b/ui/src/assets/scheduling_latency.png
index 2a28074..36bcfb8 100644
--- a/ui/src/assets/scheduling_latency.png
+++ b/ui/src/assets/scheduling_latency.png
Binary files differ
diff --git a/ui/src/assets/widgets/anchor.scss b/ui/src/assets/widgets/anchor.scss
index 0d54a00..b78695e 100644
--- a/ui/src/assets/widgets/anchor.scss
+++ b/ui/src/assets/widgets/anchor.scss
@@ -29,7 +29,7 @@
     // For some reason, floating this icon results in the most pleasing vertical
     // alignment.
     float: right;
-    margin: 0 0 0 2px;
+    margin: 0 0 0 0px;
     font-size: inherit;
     line-height: inherit;
     color: inherit;
diff --git a/ui/src/assets/widgets/button.scss b/ui/src/assets/widgets/button.scss
index b42115b..61eeb50 100644
--- a/ui/src/assets/widgets/button.scss
+++ b/ui/src/assets/widgets/button.scss
@@ -87,6 +87,13 @@
   // Reduce padding when compact
   &.pf-compact {
     padding: 2px 4px;
+    & > .pf-left-icon {
+      margin-right: 2px;
+    }
+
+    & > .pf-right-icon {
+      margin-left: 2px;
+    }
   }
 
   // Reduce padding when we are icon-only
@@ -95,7 +102,7 @@
       margin: 0;
     }
 
-    padding: 4px 4px;
+    padding: 4px;
 
     &.pf-compact {
       padding: 0;
diff --git a/ui/src/assets/widgets/details_shell.scss b/ui/src/assets/widgets/details_shell.scss
new file mode 100644
index 0000000..75c6af9
--- /dev/null
+++ b/ui/src/assets/widgets/details_shell.scss
@@ -0,0 +1,109 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+@import "theme";
+
+.pf-details-shell {
+  font-family: $pf-font;
+  display: flex;
+  flex-direction: column;
+  min-height: 100%;
+
+  &.pf-match-parent {
+    height: 100%;
+  }
+
+  .pf-header-bar {
+    z-index: 1; // HACK: Make the header bar appear above the content
+    position: sticky;
+    top: 0;
+    left: 0;
+    display: flex;
+    flex-direction: row;
+    align-items: baseline;
+    gap: 6px;
+    background-color: white;
+    color: black;
+    padding: 8px 8px 5px 8px;
+    box-shadow: 0px 1px 4px rgba(0, 0, 0, 0.2);
+    border-bottom: 1px solid rgba(0, 0, 0, 0.2);
+
+    .pf-header-title {
+      font-size: 18px;
+      min-width: min-content;
+      white-space: nowrap;
+    }
+
+    .pf-header-description {
+      font-size: 14px;
+      flex-grow: 1;
+      flex-shrink: 1;
+      white-space: nowrap;
+      text-overflow: ellipsis;
+      overflow: hidden;
+    }
+
+    .pf-header-buttons {
+      display: flex;
+      min-width: min-content;
+      gap: 4px;
+    }
+  }
+
+  .pf-content {
+    font-size: smaller;
+    flex-grow: 1;
+    font-weight: 300;
+
+    table {
+      @include transition(0.1s);
+      @include table-font-size;
+      width: 100%;
+      // Aggregation panel uses multiple table elements that need to be aligned,
+      // which is done by using fixed table layout.
+      table-layout: fixed;
+      word-wrap: break-word;
+      padding: 0 10px;
+      tr:hover {
+        td,
+        th {
+          background-color: $table-hover-color;
+
+          &.no-highlight {
+            background-color: white;
+          }
+        }
+      }
+      th {
+        text-align: left;
+        width: 30%;
+        font-weight: normal;
+        vertical-align: top;
+      }
+      td.value {
+        white-space: pre-wrap;
+      }
+      td.padding {
+        min-width: 10px;
+      }
+      .array-index {
+        text-align: right;
+      }
+    }
+
+    .auto-layout {
+      table-layout: auto;
+    }
+  }
+}
diff --git a/ui/src/assets/widgets/grid_layout.scss b/ui/src/assets/widgets/grid_layout.scss
new file mode 100644
index 0000000..3be0b0c
--- /dev/null
+++ b/ui/src/assets/widgets/grid_layout.scss
@@ -0,0 +1,35 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+@import "theme";
+
+$pf-responsive-gutter: 8px;
+
+.pf-grid-layout {
+  display: grid;
+  gap: $pf-responsive-gutter;
+  grid-template-columns: repeat(auto-fit, minmax(350px, 1fr));
+  margin: $pf-responsive-gutter;
+
+  align-items: start;
+
+  & > .pf-column {
+    display: flex;
+    flex-direction: column;
+    gap: $pf-responsive-gutter;
+  }
+  td {
+    word-break: break-all;
+  }
+}
diff --git a/ui/src/assets/widgets/section.scss b/ui/src/assets/widgets/section.scss
new file mode 100644
index 0000000..3dc4a0e
--- /dev/null
+++ b/ui/src/assets/widgets/section.scss
@@ -0,0 +1,31 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+@import "theme";
+
+.pf-section {
+  border-radius: $pf-border-radius;
+  border: 1px solid rgba(0, 0, 0, 0.2);
+  header {
+    padding: 6px;
+    border-bottom: 1px solid rgba(0, 0, 0, 0.1);
+    h1 {
+      font-size: larger;
+      font-weight: bolder;
+    }
+  }
+  article {
+    padding: 6px;
+  }
+}
diff --git a/ui/src/assets/widgets/tree.scss b/ui/src/assets/widgets/tree.scss
index 569ad9a..83073c9 100644
--- a/ui/src/assets/widgets/tree.scss
+++ b/ui/src/assets/widgets/tree.scss
@@ -1,126 +1,94 @@
 @import "theme";
 
-$indent: 20px;
+$chevron-svg: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' height='8' width='8'%3E%3Cline x1='2' y1='0' x2='6' y2='4' stroke='black'/%3E%3Cline x1='6' y1='4' x2='2' y2='8' stroke='black'/%3E%3C/svg%3E");
 
-.pf-tree-left {
-  min-width: max-content;
-  padding: 2px 8px 2px 4px;
-  font-weight: 600;
-}
-
-.pf-tree-right {
-  padding: 2px 4px;
-}
-
-// In tree mode, the values and keys are represented simply using a nested set
-// of divs, where each child is pushed in with a left margin which creates the
-// effect of indenting nested subtrees.
-.pf-ptree {
-  .pf-tree-children {
-    padding-left: $indent;
-    border-left: dotted 1px gray;
-  }
-
-  .pf-tree-node {
-    display: grid;
-    width: max-content;
-    grid-template-columns: [left]auto [right]1fr;
-    border-radius: $pf-border-radius;
-
-    &:hover {
-      background: lightgray;
-    }
-
-    .pf-tree-left {
-      grid-column: left;
-      &:after {
-        content: ":";
-        font-weight: 600;
-        padding-left: 4px;
-        padding-right: 8px;
-      }
-    }
-
-    .pf-tree-right {
-      grid-column: right;
-    }
-  }
-}
-
-// In grid mode, right elements should be horizontally aligned, regardless
-// of indentation level.
-// "Subgrid" is a convenient tool for aligning nested grids to an outer grid's
-// columns, but it is not supported in Chrome as of March 2023.
-// See https://caniuse.com/css-subgrid
-// See https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Grid_Layout/Subgrid
-//
-// For future reference - this is what a subgrid implementation might look like:
-//
-// .pf-ptree-grid {
-//   display: grid;
-//   grid-template-columns: auto 1fr;
-//
-//   .pf-tree-children {
-//     display: grid;
-//     grid-column: span 2;
-//     grid-template-columns: subgrid;
-//     padding-left: $indent;
-//     border-left: dotted 1px gray;
-//   }
-
-//   .pf-tree-node {
-//     display: grid;
-//     grid-column: span 2;
-//     grid-template-columns: subgrid;
-//     width: max-content;
-//     border-radius: $pf-border-radius;
-
-//     &:hover {
-//       background: lightgray;
-//     }
-//   }
-// }
-
-@mixin indentation($max, $level) {
-  @if $level <= $max {
-    .pf-tree-children {
-      .pf-tree-left {
-        margin-left: $level * $indent;
-      }
-      @include indentation($max, $level + 1);
-    }
-  }
-}
-
-.pf-ptree-grid {
+@mixin grid {
   display: grid;
-  grid-template-columns: auto 1fr;
-
-  .pf-tree-children {
-    display: contents;
-  }
-
-  .pf-tree-node {
-    display: contents;
-
-    &:hover {
-      background: lightgray;
-    }
-
-    .pf-tree-left {
-      background: inherit;
-      border-radius: $pf-border-radius 0 0 $pf-border-radius;
-    }
-
-    .pf-tree-right {
-      background: inherit;
-      border-radius: 0 $pf-border-radius $pf-border-radius 0;
-    }
-  }
-
-  @include indentation(16, 1);
+  grid-template-columns: [gutter]auto [left]auto [right]1fr;
+  row-gap: 5px;
 }
 
-.pf-tree-children.pf-pgrid-hidden {
-  display: none;
+.pf-tree {
+  font-family: $pf-font;
+  @include grid;
+  .pf-tree-node {
+    display: contents;
+    .pf-tree-content {
+      display: contents;
+      &:hover {
+        background: $table-hover-color;
+      }
+      .pf-tree-left {
+        background: inherit;
+        min-width: max-content;
+        border-radius: $pf-border-radius 0 0 $pf-border-radius;
+        font-weight: bolder;
+      }
+      .pf-tree-right {
+        background: inherit;
+        padding: 0 0 0 15px;
+        border-radius: 0 $pf-border-radius $pf-border-radius 0;
+        word-break: break-all;
+        white-space: pre-wrap;
+      }
+    }
+    .pf-tree-gutter {
+      display: block;
+      position: relative;
+    }
+    &.pf-collapsed > .pf-tree-gutter {
+      cursor: pointer;
+      width: 16px;
+      display: flex;
+      justify-content: center;
+      align-items: center;
+      &::after {
+        content: $chevron-svg;
+      }
+    }
+    &.pf-expanded > .pf-tree-gutter {
+      cursor: pointer;
+      width: 16px;
+      display: flex;
+      justify-content: center;
+      align-items: center;
+      &::after {
+        content: $chevron-svg;
+        rotate: 90deg;
+      }
+    }
+    &.pf-loading > .pf-tree-gutter {
+      width: 16px;
+      display: flex;
+      justify-content: center;
+      align-items: center;
+      &::after {
+        content: "";
+        border: solid 1px lightgray;
+        border-top: solid 1px $pf-primary-background;
+        animation: pf-spinner-rotation 1s infinite linear;
+        width: 8px;
+        height: 8px;
+        border-radius: 50%;
+      }
+    }
+    .pf-tree-indent-gutter {
+      display: block;
+      position: relative;
+    }
+    .pf-tree-children {
+      grid-column: 2 / span 2;
+      @include grid;
+      .pf-tree-gutter {
+        // Nested gutters are always present, to provide indentation
+        width: 16px;
+      }
+    }
+    &.pf-collapsed > .pf-tree-children {
+      display: none;
+    }
+    &.pf-collapsed > .pf-tree-indent-gutter {
+      display: none;
+    }
+  }
 }
diff --git a/ui/src/common/actions.ts b/ui/src/common/actions.ts
index 7b8f462..7050e4e 100644
--- a/ui/src/common/actions.ts
+++ b/ui/src/common/actions.ts
@@ -126,6 +126,9 @@
 // tracks are removeable.
 function removeTrack(state: StateDraft, trackId: string) {
   const track = state.tracks[trackId];
+  if (track === undefined) {
+    return;
+  }
   delete state.tracks[trackId];
 
   const removeTrackId = (arr: string[]) => {
@@ -136,7 +139,10 @@
   if (track.trackGroup === SCROLLING_TRACK_GROUP) {
     removeTrackId(state.scrollingTracks);
   } else if (track.trackGroup !== undefined) {
-    removeTrackId(state.trackGroups[track.trackGroup].tracks);
+    const trackGroup = state.trackGroups[track.trackGroup];
+    if (trackGroup !== undefined) {
+      removeTrackId(trackGroup.tracks);
+    }
   }
   state.pinnedTracks = state.pinnedTracks.filter((id) => id !== trackId);
 }
@@ -227,7 +233,10 @@
       if (track.trackGroup === SCROLLING_TRACK_GROUP) {
         state.scrollingTracks.push(id);
       } else if (track.trackGroup !== undefined) {
-        assertExists(state.trackGroups[track.trackGroup]).tracks.push(id);
+        const group = state.trackGroups[track.trackGroup];
+        if (group !== undefined) {
+          group.tracks.push(id);
+        }
       }
     });
   },
@@ -296,8 +305,10 @@
 
   removeDebugTrack(state: StateDraft, args: {trackId: string}): void {
     const track = state.tracks[args.trackId];
-    assertTrue(track.kind === DEBUG_SLICE_TRACK_KIND);
-    removeTrack(state, args.trackId);
+    if (track !== undefined) {
+      assertTrue(track.kind === DEBUG_SLICE_TRACK_KIND);
+      removeTrack(state, args.trackId);
+    }
   },
 
   removeVisualisedArgTracks(state: StateDraft, args: {trackIds: string[]}) {
diff --git a/ui/src/common/arg_types.ts b/ui/src/common/arg_types.ts
index bbea394..e67268f 100644
--- a/ui/src/common/arg_types.ts
+++ b/ui/src/common/arg_types.ts
@@ -12,20 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-export type Arg = string|
-    {kind: 'SLICE', trackId: string, sliceId: number, description?: string};
-export type Args = Map<string, Arg>;
-
-export type ArgsTree = ArgsTreeMap|ArgsTreeArray|string;
-export type ArgsTreeArray = ArgsTree[];
-export interface ArgsTreeMap {
-  [key: string]: ArgsTree;
-}
-
-export function isArgTreeArray(item: ArgsTree): item is ArgsTreeArray {
-  return typeof item === 'object' && Array.isArray(item);
-}
-
-export function isArgTreeMap(item: ArgsTree): item is ArgsTreeMap {
-  return typeof item === 'object' && !Array.isArray(item);
-}
+export type ArgValue =
+    string|{kind: 'SLICE', trackId: string, sliceId: number, rawValue: string};
+export type Args = Map<string, ArgValue>;
diff --git a/ui/src/common/time.ts b/ui/src/common/time.ts
index a96778d..d9157df 100644
--- a/ui/src/common/time.ts
+++ b/ui/src/common/time.ts
@@ -13,10 +13,13 @@
 // limitations under the License.
 
 import {assertTrue} from '../base/logging';
+import {asTPTimestamp, toTraceTime} from '../frontend/sql_types';
+
 import {ColumnType} from './query_result';
 
 // TODO(hjd): Combine with timeToCode.
-export function timeToString(sec: number) {
+export function tpTimeToString(time: TPTime) {
+  const sec = tpTimeToSeconds(time);
   const units = ['s', 'ms', 'us', 'ns'];
   const sign = Math.sign(sec);
   let n = Math.abs(sec);
@@ -28,67 +31,60 @@
   return `${sign < 0 ? '-' : ''}${Math.round(n * 10) / 10} ${units[u]}`;
 }
 
-export function tpTimeToString(time: TPTime) {
-  // TODO(stevegolton): Write a formatter to format bigint timestamps natively.
-  return timeToString(tpTimeToSeconds(time));
+// 1000000023ns -> "1.000 000 023"
+export function formatTPTime(time: TPTime) {
+  const strTime = time.toString().padStart(10, '0');
+
+  const nanos = strTime.slice(-3);
+  const micros = strTime.slice(-6, -3);
+  const millis = strTime.slice(-9, -6);
+  const seconds = strTime.slice(0, -9);
+
+  return `${seconds}.${millis} ${micros} ${nanos}`;
 }
 
-export function fromNs(ns: number) {
-  return ns / 1e9;
-}
-
-export function toNsFloor(seconds: number) {
-  return Math.floor(seconds * 1e9);
-}
-
-export function toNsCeil(seconds: number) {
-  return Math.ceil(seconds * 1e9);
+// TODO(hjd): Rename to formatTimestampWithUnits
+// 1000000023ns -> "1s 23ns"
+export function tpTimeToCode(time: TPTime): string {
+  let result = '';
+  if (time < 1) return '0s';
+  const unitAndValue: [string, bigint][] = [
+    ['m', 60000000000n],
+    ['s', 1000000000n],
+    ['ms', 1000000n],
+    ['us', 1000n],
+    ['ns', 1n],
+  ];
+  unitAndValue.forEach(([unit, unitSize]) => {
+    if (time >= unitSize) {
+      const unitCount = time / unitSize;
+      result += unitCount.toLocaleString() + unit + ' ';
+      time %= unitSize;
+    }
+  });
+  return result.slice(0, -1);
 }
 
 export function toNs(seconds: number) {
   return Math.round(seconds * 1e9);
 }
 
-// 1000000023ns -> "1.000 000 023"
-export function formatTimestamp(sec: number) {
-  const parts = sec.toFixed(9).split('.');
-  parts[1] = parts[1].replace(/\B(?=(\d{3})+(?!\d))/g, ' ');
-  return parts.join('.');
-}
-
-export function formatTPTime(time: TPTime) {
-  // TODO(stevegolton): Write a formatter to format bigint timestamps natively.
-  return formatTimestamp(tpTimeToSeconds(time));
-}
-
-// TODO(hjd): Rename to formatTimestampWithUnits
-// 1000000023ns -> "1s 23ns"
-export function timeToCode(sec: number): string {
-  let result = '';
-  let ns = Math.round(sec * 1e9);
-  if (ns < 1) return '0s';
-  const unitAndValue = [
-    ['m', 60000000000],
-    ['s', 1000000000],
-    ['ms', 1000000],
-    ['us', 1000],
-    ['ns', 1],
-  ];
-  unitAndValue.forEach((pair) => {
-    const unit = pair[0] as string;
-    const val = pair[1] as number;
-    if (ns >= val) {
-      const i = Math.floor(ns / val);
-      ns -= i * val;
-      result += i.toLocaleString() + unit + ' ';
-    }
-  });
-  return result.slice(0, -1);
-}
-
-export function tpTimeToCode(time: TPTime) {
-  // TODO(stevegolton): Write a formatter to format bigint timestamps natively.
-  return timeToCode(tpTimeToSeconds(time));
+// Given an absolute time in TP units, print the time from the start of the
+// trace as a string.
+// Going forward this shall be the universal timestamp printing function
+// superseding all others, with options to customise formatting and the domain.
+// If minimal is true, the time will be printed without any units and in a
+// minimal but still readable format, otherwise the time will be printed with
+// units on each group of digits. Use minimal in places like tables and
+// timelines where there are likely to be multiple timestamps in one place, and
+// use the normal formatting in places that have one-off timestamps.
+export function formatTime(time: TPTime, minimal: boolean = false): string {
+  const relTime = toTraceTime(asTPTimestamp(time));
+  if (minimal) {
+    return formatTPTime(relTime);
+  } else {
+    return tpTimeToCode(relTime);
+  }
 }
 
 export function currentDateHourAndMinute(): string {
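
As a worked example of the distinction (assuming toTraceTime subtracts the trace start timestamp, as the comment above implies): for a time 1_000_000_023ns after the start of the trace, formatTime(t) yields "1s 23ns" while formatTime(t, true) yields "1.000 000 023".
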
diff --git a/ui/src/common/time_unittest.ts b/ui/src/common/time_unittest.ts
index b9e6bd9..47395ec 100644
--- a/ui/src/common/time_unittest.ts
+++ b/ui/src/common/time_unittest.ts
@@ -12,21 +12,71 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import {timeToCode, TPTime, TPTimeSpan} from './time';
+import {
+  formatTPTime,
+  TPTime,
+  TPTimeSpan,
+  tpTimeToCode,
+  tpTimeToString,
+} from './time';
 
-test('seconds to code', () => {
-  expect(timeToCode(3)).toEqual('3s');
-  expect(timeToCode(60)).toEqual('1m');
-  expect(timeToCode(63)).toEqual('1m 3s');
-  expect(timeToCode(63.2)).toEqual('1m 3s 200ms');
-  expect(timeToCode(63.2221)).toEqual('1m 3s 222ms 100us');
-  expect(timeToCode(63.2221111)).toEqual('1m 3s 222ms 111us 100ns');
-  expect(timeToCode(0.2221111)).toEqual('222ms 111us 100ns');
-  expect(timeToCode(0.000001)).toEqual('1us');
-  expect(timeToCode(0.000003)).toEqual('3us');
-  expect(timeToCode(1.000001)).toEqual('1s 1us');
-  expect(timeToCode(200.00000003)).toEqual('3m 20s 30ns');
-  expect(timeToCode(0)).toEqual('0s');
+test('tpTimeToCode', () => {
+  expect(tpTimeToCode(0n)).toEqual('0s');
+  expect(tpTimeToCode(3_000_000_000n)).toEqual('3s');
+  expect(tpTimeToCode(60_000_000_000n)).toEqual('1m');
+  expect(tpTimeToCode(63_000_000_000n)).toEqual('1m 3s');
+  expect(tpTimeToCode(63_200_000_000n)).toEqual('1m 3s 200ms');
+  expect(tpTimeToCode(63_222_100_000n)).toEqual('1m 3s 222ms 100us');
+  expect(tpTimeToCode(63_222_111_100n)).toEqual('1m 3s 222ms 111us 100ns');
+  expect(tpTimeToCode(222_111_100n)).toEqual('222ms 111us 100ns');
+  expect(tpTimeToCode(1_000n)).toEqual('1us');
+  expect(tpTimeToCode(3_000n)).toEqual('3us');
+  expect(tpTimeToCode(1_000_001_000n)).toEqual('1s 1us');
+  expect(tpTimeToCode(200_000_000_030n)).toEqual('3m 20s 30ns');
+  expect(tpTimeToCode(3_600_000_000_000n)).toEqual('60m');
+  expect(tpTimeToCode(3_600_000_000_001n)).toEqual('60m 1ns');
+  expect(tpTimeToCode(86_400_000_000_000n)).toEqual('1,440m');
+  expect(tpTimeToCode(86_400_000_000_001n)).toEqual('1,440m 1ns');
+  expect(tpTimeToCode(31_536_000_000_000_000n)).toEqual('525,600m');
+  expect(tpTimeToCode(31_536_000_000_000_001n)).toEqual('525,600m 1ns');
+});
+
+test('formatTPTime', () => {
+  expect(formatTPTime(0n)).toEqual('0.000 000 000');
+  expect(formatTPTime(3_000_000_000n)).toEqual('3.000 000 000');
+  expect(formatTPTime(60_000_000_000n)).toEqual('60.000 000 000');
+  expect(formatTPTime(63_000_000_000n)).toEqual('63.000 000 000');
+  expect(formatTPTime(63_200_000_000n)).toEqual('63.200 000 000');
+  expect(formatTPTime(63_222_100_000n)).toEqual('63.222 100 000');
+  expect(formatTPTime(63_222_111_100n)).toEqual('63.222 111 100');
+  expect(formatTPTime(222_111_100n)).toEqual('0.222 111 100');
+  expect(formatTPTime(1_000n)).toEqual('0.000 001 000');
+  expect(formatTPTime(3_000n)).toEqual('0.000 003 000');
+  expect(formatTPTime(1_000_001_000n)).toEqual('1.000 001 000');
+  expect(formatTPTime(200_000_000_030n)).toEqual('200.000 000 030');
+  expect(formatTPTime(3_600_000_000_000n)).toEqual('3600.000 000 000');
+  expect(formatTPTime(86_400_000_000_000n)).toEqual('86400.000 000 000');
+  expect(formatTPTime(86_400_000_000_001n)).toEqual('86400.000 000 001');
+  expect(formatTPTime(31_536_000_000_000_000n)).toEqual('31536000.000 000 000');
+  expect(formatTPTime(31_536_000_000_000_001n)).toEqual('31536000.000 000 001');
+});
+
+test('tpTimeToString', () => {
+  expect(tpTimeToString(0n)).toEqual('0 s');
+  expect(tpTimeToString(3_000_000_000n)).toEqual('3 s');
+  expect(tpTimeToString(60_000_000_000n)).toEqual('60 s');
+  expect(tpTimeToString(63_000_000_000n)).toEqual('63 s');
+  expect(tpTimeToString(63_200_000_000n)).toEqual('63.2 s');
+  expect(tpTimeToString(63_222_100_000n)).toEqual('63.2 s');
+  expect(tpTimeToString(63_222_111_100n)).toEqual('63.2 s');
+  expect(tpTimeToString(222_111_100n)).toEqual('222.1 ms');
+  expect(tpTimeToString(1_000n)).toEqual('1 us');
+  expect(tpTimeToString(3_000n)).toEqual('3 us');
+  expect(tpTimeToString(1_000_001_000n)).toEqual('1 s');
+  expect(tpTimeToString(200_000_000_030n)).toEqual('200 s');
+  expect(tpTimeToString(3_600_000_000_000n)).toEqual('3600 s');
+  expect(tpTimeToString(86_400_000_000_000n)).toEqual('86400 s');
+  expect(tpTimeToString(31_536_000_000_000_000n)).toEqual('31536000 s');
 });
 
 function mkSpan(start: TPTime, end: TPTime) {
diff --git a/ui/src/controller/args_parser.ts b/ui/src/controller/args_parser.ts
index 30afa1e..c890fb3 100644
--- a/ui/src/controller/args_parser.ts
+++ b/ui/src/controller/args_parser.ts
@@ -12,125 +12,52 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import {
-  Args,
-  ArgsTree,
-  ArgsTreeArray,
-  ArgsTreeMap,
-  isArgTreeArray,
-  isArgTreeMap,
-} from '../common/arg_types';
+export type Key = string|number;
+
+export interface Argument<T> {
+  key: Key;
+  path?: string;
+  value?: T;
+  children?: Argument<T>[];
+}
 
 // Converts a flat sequence of key-value pairs into a JSON-like nested
 // structure. Dots in keys are used to create a nested dictionary, indices in
-// brackets used to create nested array. For example, consider the following
-// sequence of key-value pairs:
-//
-// simple_key = simple_value
-// thing.key = value
-// thing.point[0].x = 10
-// thing.point[0].y = 20
-// thing.point[1].x = 0
-// thing.point[1].y = -10
-//
-// It's going to be converted to a following object:
-//
-// {
-//   "simple_key": "simple_value",
-//   "thing": {
-//     "key": "value",
-//     "point": [
-//       { "x": "10", "y": "20" },
-//       { "x": "0", "y": "-10" }
-//     ]
-//   }
-// }
-export function parseArgs(args: Args): ArgsTree|undefined {
-  const result: ArgsTreeMap = {};
-  for (const [key, value] of args) {
-    if (typeof value === 'string') {
-      fillObject(result, key.split('.'), value);
-    }
+// brackets are used to create a nested array.
+export function convertArgsToTree<T>(input: Map<string, T>): Argument<T>[] {
+  const result: Argument<T>[] = [];
+  for (const [path, value] of input.entries()) {
+    const nestedKey = getNestedKey(path);
+    insert(result, nestedKey, path, value);
   }
   return result;
 }
 
-function getOrCreateMap(
-    object: ArgsTreeMap|ArgsTreeArray, key: string|number): ArgsTreeMap {
-  let value: ArgsTree;
-  if (isArgTreeMap(object) && typeof key === 'string') {
-    value = object[key];
-  } else if (isArgTreeArray(object) && typeof key === 'number') {
-    value = object[key];
+function getNestedKey(key: string): Key[] {
+  const result: Key[] = [];
+  let match;
+  const re = /([^\.\[\]]+)|\[(\d+)\]/g;
+  while ((match = re.exec(key)) !== null) {
+    result.push(match[2] ? parseInt(match[2]) : match[1]);
+  }
+  return result;
+}
+
+function insert<T>(
+    args: Argument<T>[], keys: Key[], path: string, value: T): void {
+  const currentKey = keys.shift()!;
+  let node = args.find((x) => x.key === currentKey);
+  if (!node) {
+    node = {key: currentKey};
+    args.push(node);
+  }
+  if (keys.length > 0) {
+    if (node.children === undefined) {
+      node.children = [];
+    }
+    insert(node.children, keys, path, value);
   } else {
-    throw new Error('incompatible parameters to getOrCreateSubmap');
+    node.path = path;
+    node.value = value;
   }
-
-  if (value !== undefined) {
-    if (isArgTreeMap(value)) {
-      return value;
-    } else {
-      // There is a value, but it's not a map - something wrong with the key set
-      throw new Error('inconsistent keys');
-    }
-  }
-
-  value = {};
-  if (isArgTreeMap(object) && typeof key === 'string') {
-    object[key] = value;
-  } else if (isArgTreeArray(object) && typeof key === 'number') {
-    object[key] = value;
-  }
-
-  return value;
-}
-
-function getOrCreateArray(object: ArgsTreeMap, key: string): ArgsTree[] {
-  let value = object[key];
-  if (value !== undefined) {
-    if (isArgTreeArray(value)) {
-      return value;
-    } else {
-      // There is a value, but it's not an array - something wrong with the key
-      // set
-      throw new Error('inconsistent keys');
-    }
-  }
-
-  value = [];
-  object[key] = value;
-  return value;
-}
-
-function fillObject(object: ArgsTreeMap, path: string[], value: string) {
-  let current = object;
-  for (let i = 0; i < path.length - 1; i++) {
-    const [part, index] = parsePathSegment(path[i]);
-    if (index === undefined) {
-      current = getOrCreateMap(current, part);
-    } else {
-      const array = getOrCreateArray(current, part);
-      current = getOrCreateMap(array, index);
-    }
-  }
-
-  const [part, index] = parsePathSegment(path[path.length - 1]);
-  if (index === undefined) {
-    current[part] = value;
-  } else {
-    const array = getOrCreateArray(current, part);
-    array[index] = value;
-  }
-}
-
-// Segment is either a simple key (e.g. "foo") or a key with an index (e.g.
-// "bar[42]"). This function returns a pair of key and index (if present).
-function parsePathSegment(segment: string): [string, number?] {
-  if (!segment.endsWith(']')) {
-    return [segment, undefined];
-  }
-
-  const indexStart = segment.indexOf('[');
-  const indexString = segment.substring(indexStart + 1, segment.length - 1);
-  return [segment.substring(0, indexStart), Math.floor(Number(indexString))];
 }
diff --git a/ui/src/controller/args_parser_unittest.ts b/ui/src/controller/args_parser_unittest.ts
new file mode 100644
index 0000000..6a763d7
--- /dev/null
+++ b/ui/src/controller/args_parser_unittest.ts
@@ -0,0 +1,76 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {convertArgsToTree} from './args_parser';
+
+test('parseArgs', () => {
+  const input = new Map<string, string>([
+    ['simple_key', 'simple_value'],
+    ['thing.key', 'value'],
+    ['thing.point[0].x', '10'],
+    ['thing.point[0].y', '20'],
+    ['thing.point[1].x', '0'],
+    ['thing.point[1].y', '-10'],
+    ['foo.bar.foo.bar', 'baz'],
+  ]);
+
+  const result = convertArgsToTree(input);
+
+  expect(result).toEqual(
+      [
+        {key: 'simple_key', path: 'simple_key', value: 'simple_value'},
+        {
+          key: 'thing',
+          children: [
+            {key: 'key', path: 'thing.key', value: 'value'},
+            {
+              key: 'point',
+              children: [
+                {
+                  key: 0,
+                  children: [
+                    {key: 'x', path: 'thing.point[0].x', value: '10'},
+                    {key: 'y', path: 'thing.point[0].y', value: '20'},
+                  ],
+                },
+                {
+                  key: 1,
+                  children: [
+                    {key: 'x', path: 'thing.point[1].x', value: '0'},
+                    {key: 'y', path: 'thing.point[1].y', value: '-10'},
+                  ],
+                },
+              ],
+            },
+          ],
+        },
+        {
+          key: 'foo',
+          children: [
+            {
+              key: 'bar',
+              children: [
+                {
+                  key: 'foo',
+                  children: [
+                    {key: 'bar', path: 'foo.bar.foo.bar', value: 'baz'},
+                  ],
+                },
+              ],
+            },
+          ],
+        },
+      ],
+  );
+});
diff --git a/ui/src/controller/selection_controller.ts b/ui/src/controller/selection_controller.ts
index b5a3907..203cb4f 100644
--- a/ui/src/controller/selection_controller.ts
+++ b/ui/src/controller/selection_controller.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 import {assertTrue} from '../base/logging';
-import {Arg, Args} from '../common/arg_types';
+import {Args, ArgValue} from '../common/arg_types';
 import {Engine} from '../common/engine';
 import {
   LONG,
@@ -41,7 +41,6 @@
 } from '../frontend/publish';
 import {SLICE_TRACK_KIND} from '../tracks/chrome_slices';
 
-import {parseArgs} from './args_parser';
 import {Controller} from './controller';
 
 export interface SelectionControllerArgs {
@@ -214,7 +213,6 @@
       }
     }
 
-    const argsTree = parseArgs(args);
     const selected: SliceDetails = {
       id: selectedId,
       ts,
@@ -225,7 +223,6 @@
       name,
       category,
       args,
-      argsTree,
     };
 
     if (trackId !== undefined) {
@@ -273,7 +270,7 @@
   }
 
   async getArgs(argId: number): Promise<Args> {
-    const args = new Map<string, Arg>();
+    const args = new Map<string, ArgValue>();
     const query = `
       select
         key AS name,
@@ -291,9 +288,12 @@
       const value = it.value || 'NULL';
       if (name === 'destination slice id' && !isNaN(Number(value))) {
         const destTrackId = await this.getDestTrackId(value);
-        args.set(
-            'Destination Slice',
-            {kind: 'SLICE', trackId: destTrackId, sliceId: Number(value)});
+        args.set('Destination Slice', {
+          kind: 'SLICE',
+          trackId: destTrackId,
+          sliceId: Number(value),
+          rawValue: value,
+        });
       } else {
         args.set(name, value);
       }
diff --git a/ui/src/frontend/base_slice_track.ts b/ui/src/frontend/base_slice_track.ts
index 88badd1..a941a31 100644
--- a/ui/src/frontend/base_slice_track.ts
+++ b/ui/src/frontend/base_slice_track.ts
@@ -33,6 +33,7 @@
 import {globals} from './globals';
 import {Slice} from './slice';
 import {DEFAULT_SLICE_LAYOUT, SliceLayout} from './slice_layout';
+import {constraintsToQueryFragment} from './sql_utils';
 import {NewTrackArgs, SliceRect, Track} from './track';
 import {BUCKETS_PER_PIXEL, CacheKey, TrackCache} from './track_cache';
 
@@ -68,8 +69,8 @@
 
   // We do not need to handle non-ending slices (where dur = -1
   // but the slice is drawn as 'infinite' length) as this is handled
-  // by a special code path.
-  // TODO(hjd): Implement special code path.
+  // by a special code path. See 'incomplete' in the INITIALIZING
+  // code of maybeRequestData.
 
   // While the slices are guaranteed to be ordered by timestamp we must
   // consider async slices (which are not perfectly nested). This is to
@@ -186,8 +187,15 @@
   private cache: TrackCache<Array<CastInternal<T['slice']>>> =
       new TrackCache(5);
 
+  // Incomplete slices (dur = -1). Rather than adding a lot of logic to
+  // the SQL queries to handle this case, we materialise them once up
+  // front and then unconditionally render them. This should be efficient
+  // since there are at most |depth| slices.
+  private incomplete = new Array<CastInternal<T['slice']>>();
+
   protected readonly tableName: string;
   private maxDurNs: TPDuration = 0n;
+
   private sqlState: 'UNINITIALIZED'|'INITIALIZING'|'QUERY_PENDING'|
       'QUERY_DONE' = 'UNINITIALIZED';
   private extraSqlColumns: string[];
@@ -262,6 +270,7 @@
   onFullRedraw(): void {
     // Give a chance to the embedder to change colors and other stuff.
     this.onUpdatedSlices(this.slices);
+    this.onUpdatedSlices(this.incomplete);
   }
 
   protected isSelectionHandled(selection: Selection): boolean {
@@ -343,6 +352,9 @@
         // bounding box that will contain the chevron.
         slice.x -= CHEVRON_WIDTH_PX / 2;
         slice.w = CHEVRON_WIDTH_PX;
+      } else if (slice.flags & SLICE_FLAGS_INCOMPLETE) {
+        slice.x = Math.max(slice.x, 0);
+        slice.w = pxEnd - slice.x;
       } else {
         // If the slice is an actual slice, intersect the slice geometry with
         // the visible viewport (this affects only the first and last slice).
@@ -364,7 +376,6 @@
     }
 
     // Second pass: fill slices by color.
-    // The .slice() turned out to be an unintended pun.
     const vizSlicesByColor = vizSlices.slice();
     vizSlicesByColor.sort((a, b) => colorCompare(a.color, b.color));
     let lastColor = undefined;
@@ -488,6 +499,49 @@
           from ${this.tableName}`);
       const row = queryRes.firstRow({maxDur: LONG, rowCount: NUM});
       this.maxDurNs = row.maxDur;
+
+      // Materialise the incomplete slices once. The number of
+      // incomplete slices is at most the depth of the track and
+      // both are expected to be small.
+      if (this.isDestroyed) {
+        return;
+      }
+      {
+        // TODO(hjd): Consider case below:
+        // raw:
+        // 0123456789
+        //   [A     did not end)
+        //     [B ]
+        //
+        //
+        // quantised:
+        // 0123456789
+        //   [A     did not end)
+        // [     B  ]
+        // Does it lead to odd results?
+        const extraCols = this.extraSqlColumns.join(',');
+        const queryRes = await this.engine.query(`
+          select
+            ts as tsq,
+            ts as tsqEnd,
+            ts,
+            -1 as dur,
+            id,
+            ${this.depthColumn()}
+            ${extraCols ? ',' + extraCols : ''}
+          from ${this.tableName}
+          where dur = -1;
+        `);
+        const incomplete =
+            new Array<CastInternal<T['slice']>>(queryRes.numRows());
+        const it = queryRes.iter(this.getRowSpec());
+        for (let i = 0; it.valid(); it.next(), ++i) {
+          incomplete[i] = this.rowToSliceInternal(it);
+        }
+        this.onUpdatedSlices(incomplete);
+        this.incomplete = incomplete;
+      }
+
       this.sqlState = 'QUERY_DONE';
     } else if (
         this.sqlState === 'INITIALIZING' || this.sqlState === 'QUERY_PENDING') {
@@ -529,24 +583,24 @@
     }
 
     const extraCols = this.extraSqlColumns.join(',');
-    let depthCol = 'depth';
-    let maybeGroupByDepth = 'depth, ';
-    const layout = this.sliceLayout;
-    const isFlat = (layout.maxDepth - layout.minDepth) <= 1;
-    // maxDepth === minDepth only makes sense if track is empty which on the
-    // one hand isn't very useful (and so maybe should be an error) on the
-    // other hand I can see it happening if someone does:
-    // minDepth = min(slices.depth); maxDepth = max(slices.depth);
-    // and slices is empty, so we treat that as flat.
-    if (isFlat) {
-      depthCol = `${this.sliceLayout.minDepth} as depth`;
-      maybeGroupByDepth = '';
-    }
+    const maybeDepth = this.isFlat() ? undefined : 'depth';
 
-    // TODO(hjd): Re-reason and improve this query:
-    // - Materialize the unfinished slices one off.
-    // - Avoid the union if we know we don't have any -1 slices.
-    // - Maybe we don't need the union at all and can deal in TS?
+    const constraint = constraintsToQueryFragment({
+      filters: [
+        `ts >= ${slicesKey.start - this.maxDurNs}`,
+        `ts <= ${slicesKey.end}`,
+        `dur != -1`,
+      ],
+      groupBy: [
+        maybeDepth,
+        'tsq',
+      ],
+      orderBy: [
+        maybeDepth,
+        'tsq',
+      ],
+    });
+
     if (this.isDestroyed) {
       this.sqlState = 'QUERY_DONE';
       return;
@@ -554,38 +608,15 @@
     // TODO(hjd): Count and expose the number of slices summarized in
     // each bucket?
     const queryRes = await this.engine.query(`
-    with q1 as (
-      select
-        ${queryTsq} as tsq,
-        ${queryTsqEnd} as tsqEnd,
+      SELECT
+        ${queryTsq} AS tsq,
+        ${queryTsqEnd} AS tsqEnd,
         ts,
-        max(dur) as dur,
+        MAX(dur) AS dur,
         id,
-        ${depthCol}
+        ${this.depthColumn()}
         ${extraCols ? ',' + extraCols : ''}
-      from ${this.tableName}
-      where
-        ts >= ${slicesKey.start - this.maxDurNs /* - durNs */} and
-        ts <= ${slicesKey.end /* + durNs */}
-      group by ${maybeGroupByDepth} tsq
-      order by tsq),
-    q2 as (
-      select
-        ${queryTsq} as tsq,
-        ${queryTsqEnd} as tsqEnd,
-        ts,
-        -1 as dur,
-        id,
-        ${depthCol}
-        ${extraCols ? ',' + extraCols : ''}
-      from ${this.tableName}
-      where dur = -1
-      group by ${maybeGroupByDepth} tsq
-      )
-      select min(dur) as _unused, * from
-      (select * from q1 union all select * from q2)
-      group by ${maybeGroupByDepth} tsq
-      order by tsq
+      FROM ${this.tableName} ${constraint}
     `);
 
     // Here convert each row to a Slice. We do what we can do
@@ -657,8 +688,9 @@
       return undefined;
     }
 
+    const depth = Math.floor((y - padding) / (sliceHeight + rowSpacing));
+
     if (y >= padding && y <= trackHeight - padding) {
-      const depth = Math.floor((y - padding) / (sliceHeight + rowSpacing));
       for (const slice of this.slices) {
         if (slice.depth === depth && slice.x <= x && x <= slice.x + slice.w) {
           return slice;
@@ -666,9 +698,28 @@
       }
     }
 
+    for (const slice of this.incomplete) {
+      if (slice.depth === depth && slice.x <= x) {
+        return slice;
+      }
+    }
+
     return undefined;
   }
 
+  private isFlat(): boolean {
+    // maxDepth and minDepth form a half-open range, so in the normal
+    // flat case maxDepth = 1 and minDepth = 0, while in the non-flat
+    // case e.g. maxDepth = 42 and minDepth = 0. maxDepth === minDepth
+    // should not occur, but could happen if there are zero slices, so
+    // treat that as flat as well.
+    return (this.sliceLayout.maxDepth - this.sliceLayout.minDepth) <= 1;
+  }
+
+  private depthColumn(): string {
+    return this.isFlat() ? `${this.sliceLayout.minDepth} as depth` : 'depth';
+  }
+
   onMouseMove(position: {x: number, y: number}): void {
     this.hoverPos = position;
     this.updateHoveredSlice(this.findSlice(position));
@@ -711,8 +762,9 @@
 
   private getVisibleSlicesInternal(start: TPTime, end: TPTime):
       Array<CastInternal<T['slice']>> {
-    return filterVisibleSlices<CastInternal<T['slice']>>(
-        this.slices, start, end);
+    const slices =
+        filterVisibleSlices<CastInternal<T['slice']>>(this.slices, start, end);
+    return slices.concat(this.incomplete);
   }
 
   private updateSliceAndTrackHeight() {
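
The net effect of the base_slice_track.ts changes is that incomplete slices are fetched by a one-off query at initialisation and then merged into every render pass, instead of being unioned into the windowed SQL. A simplified sketch of that merge (illustrative names, not the actual BaseSliceTrack API):

```typescript
// Illustrative sketch: windowed slices are filtered to the viewport as
// before, while the one-off materialised incomplete slices are always
// appended, mirroring getVisibleSlicesInternal() above.
interface DemoSlice {
  id: number;
  ts: bigint;
  dur: bigint;  // -1n marks a slice that never ended
  depth: number;
}

function visibleSlices(
    windowed: DemoSlice[], incomplete: DemoSlice[], start: bigint,
    end: bigint): DemoSlice[] {
  const inView =
      windowed.filter((s) => s.ts <= end && s.ts + s.dur >= start);
  // At most |depth| incomplete slices exist, so concatenating them
  // unconditionally is cheap.
  return inView.concat(incomplete);
}
```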
diff --git a/ui/src/frontend/chrome_slice_panel.ts b/ui/src/frontend/chrome_slice_panel.ts
index 58efd07..b7e9d87 100644
--- a/ui/src/frontend/chrome_slice_panel.ts
+++ b/ui/src/frontend/chrome_slice_panel.ts
@@ -16,17 +16,32 @@
 
 import {sqliteString} from '../base/string_utils';
 import {Actions} from '../common/actions';
-import {Arg, ArgsTree, isArgTreeArray, isArgTreeMap} from '../common/arg_types';
+import {ArgValue} from '../common/arg_types';
 import {EngineProxy} from '../common/engine';
 import {runQuery} from '../common/queries';
-import {TPDuration, tpDurationToSeconds, tpTimeToCode} from '../common/time';
+import {
+  TPDuration,
+  tpDurationToSeconds,
+  TPTime,
+  tpTimeToCode,
+} from '../common/time';
+import {Argument, convertArgsToTree, Key} from '../controller/args_parser';
 
+import {Anchor} from './anchor';
 import {FlowPoint, globals, SliceDetails} from './globals';
-import {PanelSize} from './panel';
-import {PopupMenuButton, PopupMenuItem} from './popup_menu';
 import {runQueryInNewTab} from './query_result_tab';
 import {verticalScrollToTrack} from './scroll_helper';
-import {SlicePanel} from './slice_panel';
+import {Icons} from './semantic_icons';
+import {asTPTimestamp} from './sql_types';
+import {Button} from './widgets/button';
+import {DetailsShell} from './widgets/details_shell';
+import {Column, GridLayout} from './widgets/grid_layout';
+import {MenuItem, PopupMenu2} from './widgets/menu';
+import {Section} from './widgets/section';
+import {SqlRef} from './widgets/sql_ref';
+import {Timestamp} from './widgets/timestamp';
+import {Tree, TreeNode} from './widgets/tree';
+import {exists} from './widgets/utils';
 
 interface ContextMenuItem {
   name: string;
@@ -125,14 +140,8 @@
   },
 ];
 
-function getSliceContextMenuItems(slice: SliceDetails): PopupMenuItem[] {
-  return ITEMS.filter((item) => item.shouldDisplay(slice)).map((item) => {
-    return {
-      itemType: 'regular',
-      text: item.name,
-      callback: () => item.getAction(slice),
-    };
-  });
+function getSliceContextMenuItems(slice: SliceDetails) {
+  return ITEMS.filter((item) => item.shouldDisplay(slice));
 }
 
 function getEngine(): EngineProxy|undefined {
@@ -144,420 +153,372 @@
   return engine;
 }
 
-// Table row contents is one of two things:
-// 1. Key-value pair
-interface TableRow {
-  kind: 'TableRow';
-  key: string;
-  value: Arg;
-
-  // Whether it's an argument (from the `args` table) or whether it's a property
-  // of the slice (i.e. `dur`, coming from `slice` table). Args have additional
-  // actions associated with them.
-  isArg: boolean;
-
-  // A full key for the arguments displayed in a tree.
-  full_key?: string;
-}
-
-// 2. Common prefix for values in an array
-interface TableHeader {
-  kind: 'TableHeader';
-  header: string;
-}
-
-type RowContents = TableRow|TableHeader;
-
-function isTableHeader(contents: RowContents): contents is TableHeader {
-  return contents.kind === 'TableHeader';
-}
-
-function appendPrefix(p1: string, p2: string): string {
-  if (p1.length === 0) {
-    return p2;
-  }
-  return `${p1}.${p2}`;
-}
-
-interface Row {
-  // How many columns (empty or with an index) precede a key
-  indentLevel: number;
-  // Optional tooltip to be displayed on the key. Used to display the full key,
-  // which has to be reconstructed from the information that might not even be
-  // visible on the screen otherwise.
-  tooltip?: string;
-  contents: RowContents;
-}
-
-class TableBuilder {
-  // Row data generated by builder
-  rows: Row[] = [];
-  indentLevel = 0;
-
-  // Maximum indent level of a key, used to determine total number of columns
-  maxIndent = 0;
-
-  // Add a key-value pair into the table
-  add(key: string, value: Arg) {
-    this.rows.push({
-      indentLevel: 0,
-      contents: {kind: 'TableRow', key, value, isArg: false},
-    });
-  }
-
-  // Add arguments tree into the table
-  addTree(tree: ArgsTree) {
-    this.addTreeInternal(tree, '', '');
-  }
-
-  private addTreeInternal(
-      record: ArgsTree, prefix: string, completePrefix: string) {
-    if (isArgTreeArray(record)) {
-      if (record.length === 1) {
-        this.addTreeInternal(record[0], `${prefix}[0]`, `${completePrefix}[0]`);
-        return;
-      }
-
-      // Add the current prefix as a separate row
-      if (prefix.length > 0) {
-        this.rows.push({
-          indentLevel: this.indentLevel,
-          contents: {kind: 'TableHeader', header: prefix},
-          tooltip: completePrefix,
-        });
-      }
-
-      this.indentLevel++;
-      for (let i = 0; i < record.length; i++) {
-        // Prefix is empty for array elements because we don't want to repeat
-        // the common prefix
-        this.addTreeInternal(record[i], `[${i}]`, `${completePrefix}[${i}]`);
-      }
-      this.indentLevel--;
-    } else if (isArgTreeMap(record)) {
-      const entries = Object.entries(record);
-      if (entries.length === 1) {
-        // Don't want to create a level of indirection in case object contains
-        // only one value; think of it like file browser in IDEs not showing
-        // intermediate nodes for common hierarchy corresponding to Java package
-        // prefix (e.g. "com/google/perfetto").
-        //
-        // In this case, add key as a prefix part.
-        const [key, value] = entries[0];
-        this.addTreeInternal(
-            value,
-            appendPrefix(prefix, key),
-            appendPrefix(completePrefix, key));
-      } else {
-        if (prefix.length > 0) {
-          const row = this.indentLevel;
-          this.rows.push({
-            indentLevel: row,
-            contents: {kind: 'TableHeader', header: prefix},
-            tooltip: completePrefix,
-          });
-          this.indentLevel++;
-        }
-        for (const [key, value] of entries) {
-          this.addTreeInternal(value, key, appendPrefix(completePrefix, key));
-        }
-        if (prefix.length > 0) {
-          this.indentLevel--;
-        }
-      }
-    } else {
-      // Leaf value in the tree: add to the table
-      const row = this.indentLevel;
-      this.rows.push({
-        indentLevel: row,
-        contents: {
-          kind: 'TableRow',
-          key: prefix,
-          value: record,
-          full_key: completePrefix,
-          isArg: true,
-        },
-        tooltip: completePrefix,
-      });
-    }
+function getArgValueRaw(value: ArgValue): string {
+  if (typeof value === 'object') {
+    return value.rawValue;
+  } else {
+    return value;
   }
 }
 
-export class ChromeSliceDetailsPanel extends SlicePanel {
-  view() {
-    const sliceInfo = globals.sliceDetails;
-    if (sliceInfo.ts !== undefined && sliceInfo.dur !== undefined &&
-        sliceInfo.name !== undefined) {
-      const defaultBuilder = new TableBuilder();
-      defaultBuilder.add('Name', sliceInfo.name);
-      defaultBuilder.add(
-          'Category',
-          !sliceInfo.category || sliceInfo.category === '[NULL]' ?
-              'N/A' :
-              sliceInfo.category);
-      defaultBuilder.add(
-          'Start time',
-          tpTimeToCode(sliceInfo.ts - globals.state.traceTime.start));
-      if (sliceInfo.absTime !== undefined) {
-        defaultBuilder.add('Absolute Time', sliceInfo.absTime);
-      }
-      defaultBuilder.add(
-          'Duration', this.computeDuration(sliceInfo.ts, sliceInfo.dur));
-      if (sliceInfo.threadTs !== undefined &&
-          sliceInfo.threadDur !== undefined) {
-        // If we have valid thread duration, also display a percentage of
-        // |threadDur| compared to |dur|.
-        const ratio = tpDurationToSeconds(sliceInfo.threadDur) /
-            tpDurationToSeconds(sliceInfo.dur);
-        const threadDurFractionSuffix = sliceInfo.threadDur === -1n ?
-            '' :
-            ` (${(ratio * 100).toFixed(2)}%)`;
-        defaultBuilder.add(
-            'Thread duration',
-            this.computeDuration(sliceInfo.threadTs, sliceInfo.threadDur) +
-                threadDurFractionSuffix);
-      }
-
-      for (const [key, value] of this.getProcessThreadDetails(sliceInfo)) {
-        if (value !== undefined) {
-          defaultBuilder.add(key, value);
-        }
-      }
-
-      defaultBuilder.add(
-          'Slice ID',
-          (sliceInfo.id !== undefined) ? sliceInfo.id.toString() : 'Unknown');
-      if (sliceInfo.description) {
-        for (const [key, value] of sliceInfo.description) {
-          defaultBuilder.add(key, value);
-        }
-      }
-      return m(
-          '.details-panel',
-          m('.details-panel-heading', m('h2', `Slice Details`)),
-          m('.details-table-multicolumn', [
-            this.renderTable(defaultBuilder, '.half-width-panel'),
-            this.renderRhs(sliceInfo),
-          ]));
-    } else {
-      return m(
-          '.details-panel',
-          m('.details-panel-heading',
-            m(
-                'h2',
-                `Slice Details`,
-                )));
-    }
-  }
-
-  private fillFlowPanel(
-      name: string, flows: {flow: FlowPoint, dur: TPDuration}[],
-      includeProcessName: boolean, result: Map<string, TableBuilder>) {
-    if (flows.length === 0) return;
-
-    const builder = new TableBuilder();
-    for (const {flow, dur} of flows) {
-      builder.add('Slice', {
-        kind: 'SLICE',
-        sliceId: flow.sliceId,
-        trackId: globals.state.uiTrackIdByTraceTrackId[flow.trackId],
-        description: flow.sliceChromeCustomName === undefined ?
-            flow.sliceName :
-            flow.sliceChromeCustomName,
-      });
-      builder.add('Delay', tpTimeToCode(dur));
-      builder.add(
-          'Thread',
-          includeProcessName ? `${flow.threadName} (${flow.processName})` :
-                               flow.threadName);
-    }
-    result.set(name, builder);
-  }
-
-  renderCanvas(_ctx: CanvasRenderingContext2D, _size: PanelSize) {}
-
-  fillArgs(slice: SliceDetails, builder: TableBuilder) {
-    if (slice.argsTree && slice.args) {
-      // Parsed arguments are available, need only to iterate over them to get
-      // slice references
-      for (const [key, value] of slice.args) {
-        if (typeof value !== 'string') {
-          builder.add(key, value);
-        }
-      }
-      builder.addTree(slice.argsTree);
-    } else if (slice.args) {
-      // Parsing has failed, but arguments are available: display them in a flat
-      // 2-column table
-      for (const [key, value] of slice.args) {
-        builder.add(key, value);
-      }
-    }
-  }
-
-  private getArgumentContextMenuItems(argument: TableRow): PopupMenuItem[] {
-    if (argument.full_key === undefined) return [];
-    if (typeof argument.value !== 'string') return [];
-    const argValue: string = argument.value;
-
-    const fullKey = argument.full_key;
-    return [
-      {
-        itemType: 'regular',
-        text: 'Copy full key',
-        callback: () => {
-          navigator.clipboard.writeText(fullKey);
-        },
-      },
-      {
-        itemType: 'regular',
-        text: 'Find slices with the same arg value',
-        callback: () => {
-          runQueryInNewTab(
-              `
+// Renders an arg key together with a dropdown menu of contextual actions.
+function renderArgKey(
+    key: string, fullKey?: string, value?: ArgValue): m.Children {
+  if (value === undefined || fullKey === undefined) {
+    return key;
+  } else {
+    return m(
+        PopupMenu2,
+        {trigger: m(Anchor, {icon: Icons.ContextMenu}, key)},
+        fullKey && m(MenuItem, {
+          label: 'Copy full key',
+          icon: 'content_copy',
+          onclick: () => {
+            navigator.clipboard.writeText(fullKey);
+          },
+        }),
+        value && fullKey && m(MenuItem, {
+          label: 'Find slices with same arg value',
+          icon: 'search',
+          onclick: () => {
+            runQueryInNewTab(
+                `
               select slice.*
               from slice
               join args using (arg_set_id)
               where key=${sqliteString(fullKey)} and display_value=${
-                  sqliteString(argValue)}
+                    sqliteString(getArgValueRaw(value))}
           `,
-              `Arg: ${sqliteString(fullKey)}=${sqliteString(argValue)}`);
-        },
-      },
+                `Arg: ${sqliteString(fullKey)}=${
+                    sqliteString(getArgValueRaw(value))}`);
+          },
+        }),
+        value && fullKey && m(MenuItem, {
+          label: 'Visualise argument values',
+          icon: 'query_stats',
+          onclick: () => {
+            globals.dispatch(Actions.addVisualisedArg({argName: fullKey}));
+          },
+        }),
+    );
+  }
+}
+
+// Try to render arg value as a special value, otherwise just render the text.
+function renderArgValue(value: ArgValue): m.Children {
+  if (typeof value === 'object' && 'kind' in value) {
+    const {kind} = value;
+    if (kind === 'SLICE') {
+      // Value looks like a slice link.
+      const {sliceId, trackId} = value;
+      return renderSliceLink(sliceId, trackId, `slice[${sliceId}]`);
+    } else {
+      const x: never = kind;
+      throw new Error(`No support for args of kind '${x}'`);
+    }
+  } else if (typeof value === 'string') {
+    if (value.startsWith('http://') || value.startsWith('https://')) {
+      // Value looks like a web link.
+      return m(
+          Anchor, {href: value, target: '_blank', icon: 'open_in_new'}, value);
+    } else {
+      // Value is nothing special.
+      return value;
+    }
+  } else {
+    const x: never = value;
+    throw new Error(`Unable to process '${x}' as an arg value`);
+  }
+}
+
+function renderSliceLink(id: number, trackId: string, name: string) {
+  return m(
+      Anchor,
       {
-        itemType: 'regular',
-        text: 'Visualise argument values',
-        callback: () => {
-          globals.dispatch(Actions.addVisualisedArg({argName: fullKey}));
+        icon: 'call_made',
+        onclick: () => {
+          globals.makeSelection(
+              Actions.selectChromeSlice({id, trackId, table: 'slice'}));
+          // Ideally we want to have a callback to
+          // findCurrentSelection after this selection has been
+          // made. Here we do not have the info for horizontally
+          // scrolling to ts.
+          verticalScrollToTrack(trackId, true);
         },
       },
-    ];
+      name);
+}
+
+function renderSummary(children: Argument<ArgValue>[]): m.Children {
+  const summary = children.slice(0, 2).map(({key}) => key).join(', ');
+  const remaining = children.length - 2;
+  if (remaining > 0) {
+    return `{${summary}, ... (${remaining} more items)}`;
+  } else {
+    return `{${summary}}`;
+  }
+}
+
+// Format any number of keys into a composite key with standardized formatting.
+function stringifyKey(...key: Key[]): string {
+  return key
+      .map((element, index) => {
+        if (typeof element === 'number') {
+          return `[${element}]`;
+        } else {
+          return (index === 0 ? '' : '.') + element;
+        }
+      })
+      .join('');
+}
+
+function renderArgTreeNodes(args: Argument<ArgValue>[]): m.Children {
+  return args.map((arg) => {
+    const {key, path, value, children} = arg;
+    if (children && children.length === 1) {
+      // If we only have one child, collapse into self and combine keys
+      const child = children[0];
+      const compositeArg = {
+        ...child,
+        key: stringifyKey(key, child.key),
+      };
+      return renderArgTreeNodes([compositeArg]);
+    } else {
+      return m(
+          TreeNode,
+          {
+            left: renderArgKey(stringifyKey(key), path, value),
+            right: exists(value) && renderArgValue(value),
+            summary: children && renderSummary(children),
+          },
+          children && renderArgTreeNodes(children),
+      );
+    }
+  });
+}
+
+interface Sliceish extends SliceDetails {
+  ts: TPTime;
+  dur: TPDuration;
+  name: string;
+}
+
+function isSliceish(slice: SliceDetails): slice is Sliceish {
+  return exists(slice.ts) && exists(slice.dur) && exists(slice.name);
+}
+
+function getDisplayName(name: string|undefined, id: number|undefined): string|
+    undefined {
+  if (name === undefined) {
+    return id === undefined ? undefined : `${id}`;
+  } else {
+    return id === undefined ? name : `${name} ${id}`;
+  }
+}
+
+function computeDuration(ts: TPTime, dur: TPDuration): string {
+  return dur === -1n ? `${globals.state.traceTime.end - ts} (Did not end)` :
+                       tpTimeToCode(dur);
+}
+
+export class ChromeSliceDetailsPanel implements m.ClassComponent {
+  view() {
+    const slice = globals.sliceDetails;
+    if (isSliceish(slice)) {
+      return m(
+          DetailsShell,
+          {
+            title: 'Slice',
+            description: slice.name,
+            buttons: this.renderContextButton(slice),
+          },
+          m(
+              GridLayout,
+              this.renderDetails(slice),
+              this.renderRhs(slice),
+              ),
+      );
+    } else {
+      return m(DetailsShell, {title: 'Slice', description: 'Loading...'});
+    }
   }
 
-  renderRhs(sliceInfo: SliceDetails): m.Vnode {
-    const builders = new Map<string, TableBuilder>();
-
-    const immediatelyPrecedingByFlowSlices = [];
-    const immediatelyFollowingByFlowSlices = [];
-    for (const flow of globals.connectedFlows) {
-      if (flow.begin.sliceId === sliceInfo.id) {
-        immediatelyFollowingByFlowSlices.push({flow: flow.end, dur: flow.dur});
-      }
-      if (flow.end.sliceId === sliceInfo.id) {
-        immediatelyPrecedingByFlowSlices.push(
-            {flow: flow.begin, dur: flow.dur});
-      }
+  private renderRhs(slice: Sliceish): m.Children {
+    const precFlows = this.renderPrecedingFlows(slice);
+    const followingFlows = this.renderFollowingFlows(slice);
+    const args = this.renderArguments(slice);
+    if (precFlows ?? followingFlows ?? args) {
+      return m(
+          Column,
+          precFlows,
+          followingFlows,
+          args,
+      );
+    } else {
+      return undefined;
     }
+  }
 
-    // This is Chrome-specific bits:
-    const isRunTask = sliceInfo.name === 'ThreadControllerImpl::RunTask' ||
-        sliceInfo.name === 'ThreadPool_RunTask';
-    const isPostTask = sliceInfo.name === 'ThreadPool_PostTask' ||
-        sliceInfo.name === 'SequenceManager PostTask';
+  private renderDetails(slice: Sliceish) {
+    return m(
+        Section,
+        {title: 'Details'},
+        m(Tree,
+          m(TreeNode, {left: 'Name', right: slice.name}),
+          m(TreeNode, {
+            left: 'Category',
+            right: !slice.category || slice.category === '[NULL]' ?
+                'N/A' :
+                slice.category,
+          }),
+          m(TreeNode, {
+            left: 'Start time',
+            right: m(Timestamp, {ts: asTPTimestamp(slice.ts)}),
+          }),
+          exists(slice.absTime) &&
+              m(TreeNode, {left: 'Absolute Time', right: slice.absTime}),
+          m(TreeNode, {
+            left: 'Duration',
+            right: computeDuration(slice.ts, slice.dur),
+          }),
+          this.renderThreadDuration(slice),
+          Array.from(this.getProcessThreadDetails(slice))
+              .map(
+                  ([key, value]) =>
+                      exists(value) && m(TreeNode, {left: key, right: value})),
+          m(TreeNode, {
+            left: 'SQL ID',
+            right: m(SqlRef, {table: 'slice', id: slice.id}),
+          }),
+          slice.description &&
+              Array.from(slice.description)
+                  .map(
+                      ([key, value]) => m(TreeNode, {left: key, right: value}),
+                      )));
+  }
 
-    // RunTask and PostTask are always same-process, so we can skip
-    // emitting process name for them.
-    this.fillFlowPanel(
-        'Preceding flows',
-        immediatelyPrecedingByFlowSlices,
-        !isRunTask,
-        builders);
-    this.fillFlowPanel(
-        'Following flows',
-        immediatelyFollowingByFlowSlices,
-        !isPostTask,
-        builders);
+  private getProcessThreadDetails(sliceInfo: SliceDetails) {
+    return new Map<string, string|undefined>([
+      ['Thread', getDisplayName(sliceInfo.threadName, sliceInfo.tid)],
+      ['Process', getDisplayName(sliceInfo.processName, sliceInfo.pid)],
+      ['User ID', sliceInfo.uid ? String(sliceInfo.uid) : undefined],
+      ['Package name', sliceInfo.packageName],
+      [
+        'Version code',
+        sliceInfo.versionCode ? String(sliceInfo.versionCode) : undefined,
+      ],
+    ]);
+  }
 
-    const argsBuilder = new TableBuilder();
-    this.fillArgs(sliceInfo, argsBuilder);
-    builders.set('Arguments', argsBuilder);
-
-    const rows: m.Vnode<any, any>[] = [];
-    for (const [name, builder] of builders) {
-      rows.push(m('h3', name));
-      rows.push(this.renderTable(builder));
+  private renderThreadDuration(sliceInfo: Sliceish) {
+    if (exists(sliceInfo.threadTs) && exists(sliceInfo.threadDur)) {
+      // If we have valid thread duration, also display a percentage of
+      // |threadDur| compared to |dur|.
+      const ratio = tpDurationToSeconds(sliceInfo.threadDur) /
+          tpDurationToSeconds(sliceInfo.dur);
+      const threadDurFractionSuffix =
+          sliceInfo.threadDur === -1n ? '' : ` (${(ratio * 100).toFixed(2)}%)`;
+      return m(TreeNode, {
+        left: 'Thread duration',
+        right: computeDuration(sliceInfo.threadTs, sliceInfo.threadDur) +
+            threadDurFractionSuffix,
+      });
+    } else {
+      return undefined;
     }
+  }
 
+  private renderPrecedingFlows(slice: Sliceish): m.Children {
+    const flows = globals.connectedFlows;
+    const inFlows = flows.filter(({end}) => end.sliceId === slice.id);
+
+    if (inFlows.length > 0) {
+      const isRunTask = slice.name === 'ThreadControllerImpl::RunTask' ||
+          slice.name === 'ThreadPool_RunTask';
+
+      return m(
+          Section,
+          {title: 'Preceding Flows'},
+          m(
+              Tree,
+              inFlows.map(
+                  ({begin, dur}) => this.renderFlow(begin, dur, !isRunTask)),
+              ));
+    } else {
+      return null;
+    }
+  }
+
+  private renderFollowingFlows(slice: Sliceish): m.Children {
+    const flows = globals.connectedFlows;
+    const outFlows = flows.filter(({begin}) => begin.sliceId === slice.id);
+
+    if (outFlows.length > 0) {
+      const isPostTask = slice.name === 'ThreadPool_PostTask' ||
+          slice.name === 'SequenceManager PostTask';
+
+      return m(
+          Section,
+          {title: 'Following Flows'},
+          m(
+              Tree,
+              outFlows.map(
+                  ({end, dur}) => this.renderFlow(end, dur, !isPostTask)),
+              ));
+    } else {
+      return null;
+    }
+  }
+
+  private renderFlow(
+      flow: FlowPoint, dur: TPDuration,
+      includeProcessName: boolean): m.Children {
+    const sliceId = flow.sliceId;
+    const trackId = globals.state.uiTrackIdByTraceTrackId[flow.trackId];
+    const description = flow.sliceChromeCustomName === undefined ?
+        flow.sliceName :
+        flow.sliceChromeCustomName;
+    const sliceLink = renderSliceLink(sliceId, trackId, description);
+    const threadName = includeProcessName ?
+        `${flow.threadName} (${flow.processName})` :
+        flow.threadName;
+    return m(
+        TreeNode,
+        {left: 'Flow'},
+        m(TreeNode, {left: 'Slice', right: sliceLink}),
+        m(TreeNode, {left: 'Delay', right: tpTimeToCode(dur)}),
+        m(TreeNode, {left: 'Thread', right: threadName}),
+    );
+  }
+
+  private renderArguments(slice: Sliceish): m.Children {
+    if (slice.args && slice.args.size > 0) {
+      const tree = convertArgsToTree(slice.args);
+      return m(
+          Section, {title: 'Arguments'}, m(Tree, renderArgTreeNodes(tree)));
+    } else {
+      return undefined;
+    }
+  }
+
+  private renderContextButton(sliceInfo: SliceDetails): m.Children {
     const contextMenuItems = getSliceContextMenuItems(sliceInfo);
     if (contextMenuItems.length > 0) {
-      rows.push(
-          m(PopupMenuButton,
-            {
-              icon: 'arrow_drop_down',
-              items: contextMenuItems,
-            },
-            'Contextual Options'));
+      const trigger = m(Button, {
+        minimal: true,
+        compact: true,
+        label: 'Contextual Options',
+        rightIcon: Icons.ContextMenu,
+      });
+      return m(
+          PopupMenu2,
+          {trigger},
+          contextMenuItems.map(
+              ({name, getAction}) =>
+                  m(MenuItem, {label: name, onclick: getAction})),
+      );
+    } else {
+      return undefined;
     }
-
-    return m('.half-width-panel', rows);
-  }
-
-  renderTable(builder: TableBuilder, additionalClasses: string = ''): m.Vnode {
-    const rows: m.Vnode[] = [];
-    for (const row of builder.rows) {
-      const renderedRow: m.Vnode[] = [];
-      const paddingLeft = `${row.indentLevel * 20}px`;
-      if (isTableHeader(row.contents)) {
-        renderedRow.push(
-            m('th',
-              {
-                colspan: 2,
-                title: row.tooltip,
-                style: {'padding-left': paddingLeft},
-              },
-              row.contents.header));
-      } else {
-        const contents: any[] = [row.contents.key];
-        if (row.contents.isArg) {
-          contents.push(
-              m('span.context-wrapper', m.trust('&nbsp;'), m(PopupMenuButton, {
-                  icon: 'arrow_drop_down',
-                  items: this.getArgumentContextMenuItems(row.contents),
-                })));
-        }
-
-        renderedRow.push(
-            m('th',
-              {title: row.tooltip, style: {'padding-left': paddingLeft}},
-              contents));
-        const value = row.contents.value;
-        if (typeof value === 'string') {
-          renderedRow.push(m('td.value', this.mayLinkify(value)));
-        } else {
-          // Type of value being a record is not propagated into the callback
-          // for some reason, extracting necessary parts as constants instead.
-          const sliceId = value.sliceId;
-          const trackId = value.trackId;
-          renderedRow.push(
-              m('td',
-                m('i.material-icons.grey',
-                  {
-                    onclick: () => {
-                      globals.makeSelection(Actions.selectChromeSlice(
-                          {id: sliceId, trackId, table: 'slice'}));
-                      // Ideally we want to have a callback to
-                      // findCurrentSelection after this selection has been
-                      // made. Here we do not have the info for horizontally
-                      // scrolling to ts.
-                      verticalScrollToTrack(trackId, true);
-                    },
-                    title: 'Go to destination slice',
-                  },
-                  'call_made'),
-                value.description));
-        }
-      }
-
-      rows.push(m('tr', renderedRow));
-    }
-
-    return m(`table.auto-layout${additionalClasses}`, rows);
-  }
-
-  private mayLinkify(what: string): string|m.Vnode {
-    if (what.startsWith('http://') || what.startsWith('https://')) {
-      return m('a', {href: what, target: '_blank'}, what);
-    }
-    return what;
   }
 }
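
The tree rendering above collapses single-child chains by concatenating keys with stringifyKey. Extracted on its own (with a couple of hypothetical calls showing the output), the key formatting looks like this:

```typescript
// stringifyKey as used by renderArgTreeNodes() above: numeric keys render as
// array indices, string keys as dot-separated segments.
type Key = string|number;

function stringifyKey(...key: Key[]): string {
  return key
      .map((element, index) => {
        if (typeof element === 'number') {
          return `[${element}]`;
        } else {
          return (index === 0 ? '' : '.') + element;
        }
      })
      .join('');
}

stringifyKey('args', 'chrome', 0);  // 'args.chrome[0]'
stringifyKey('cat', 'name');        // 'cat.name'
```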
diff --git a/ui/src/frontend/counter_panel.ts b/ui/src/frontend/counter_panel.ts
index 4237773..e1710a8 100644
--- a/ui/src/frontend/counter_panel.ts
+++ b/ui/src/frontend/counter_panel.ts
@@ -15,47 +15,54 @@
 import m from 'mithril';
 
 import {tpTimeToCode} from '../common/time';
+
 import {globals} from './globals';
-import {Panel} from './panel';
+import {asTPTimestamp} from './sql_types';
+import {DetailsShell} from './widgets/details_shell';
+import {GridLayout} from './widgets/grid_layout';
+import {Section} from './widgets/section';
+import {Timestamp} from './widgets/timestamp';
+import {Tree, TreeNode} from './widgets/tree';
 
-interface CounterDetailsPanelAttrs {}
-
-export class CounterDetailsPanel extends Panel<CounterDetailsPanelAttrs> {
+export class CounterDetailsPanel implements m.ClassComponent {
   view() {
     const counterInfo = globals.counterDetails;
     if (counterInfo && counterInfo.startTime &&
         counterInfo.name !== undefined && counterInfo.value !== undefined &&
         counterInfo.delta !== undefined && counterInfo.duration !== undefined) {
       return m(
-          '.details-panel',
-          m('.details-panel-heading', m('h2', `Counter Details`)),
-          m(
-              '.details-table',
-              [m('table',
-                 [
-                   m('tr', m('th', `Name`), m('td', `${counterInfo.name}`)),
-                   m('tr',
-                     m('th', `Start time`),
-                     m('td',
-                       `${
-                           tpTimeToCode(
-                               counterInfo.startTime -
-                               globals.state.traceTime.start)}`)),
-                   m('tr',
-                     m('th', `Value`),
-                     m('td', `${counterInfo.value.toLocaleString()}`)),
-                   m('tr',
-                     m('th', `Delta`),
-                     m('td', `${counterInfo.delta.toLocaleString()}`)),
-                   m('tr',
-                     m('th', `Duration`),
-                     m('td', `${tpTimeToCode(counterInfo.duration)}`)),
-                 ])],
-              ));
+          DetailsShell,
+          {title: 'Counter', description: `${counterInfo.name}`},
+          m(GridLayout,
+            m(
+                Section,
+                {title: 'Properties'},
+                m(
+                    Tree,
+                    m(TreeNode, {left: 'Name', right: `${counterInfo.name}`}),
+                    m(TreeNode, {
+                      left: 'Start time',
+                      right:
+                          m(Timestamp,
+                            {ts: asTPTimestamp(counterInfo.startTime)}),
+                    }),
+                    m(TreeNode, {
+                      left: 'Value',
+                      right: `${counterInfo.value.toLocaleString()}`,
+                    }),
+                    m(TreeNode, {
+                      left: 'Delta',
+                      right: `${counterInfo.delta.toLocaleString()}`,
+                    }),
+                    m(TreeNode, {
+                      left: 'Duration',
+                      right: `${tpTimeToCode(counterInfo.duration)}`,
+                    }),
+                    ),
+                )),
+      );
     } else {
-      return m(
-          '.details-panel',
-          m('.details-panel-heading', m('h2', `Counter Details`)));
+      return m(DetailsShell, {title: 'Counter', description: 'Loading...'});
     }
   }
 
diff --git a/ui/src/frontend/ftrace_panel.ts b/ui/src/frontend/ftrace_panel.ts
index f66cddf..02b3d12 100644
--- a/ui/src/frontend/ftrace_panel.ts
+++ b/ui/src/frontend/ftrace_panel.ts
@@ -18,16 +18,19 @@
 import {assertExists} from '../base/logging';
 import {Actions} from '../common/actions';
 import {colorForString} from '../common/colorizer';
-import {formatTPTime, TPTime} from '../common/time';
+import {TPTime} from '../common/time';
 
 import {globals} from './globals';
 import {Panel} from './panel';
+import {asTPTimestamp} from './sql_types';
+import {DetailsShell} from './widgets/details_shell';
 import {
   MultiSelect,
   MultiSelectDiff,
   Option as MultiSelectOption,
 } from './widgets/multiselect';
 import {PopupPosition} from './widgets/popup';
+import {Timestamp} from './widgets/timestamp';
 
 const ROW_H = 20;
 const PAGE_SIZE = 250;
@@ -55,16 +58,12 @@
 
   view(_: m.CVnode<{}>) {
     return m(
-        '.ftrace-panel',
-        m(
-            '.sticky',
-            [
-              this.renderRowsLabel(),
-              this.renderFilterPanel(),
-            ],
-            ),
-        this.renderRows(),
-    );
+        DetailsShell,
+        {
+          title: this.renderTitle(),
+          buttons: this.renderFilterPanel(),
+        },
+        m('.ftrace-panel', this.renderRows()));
   }
 
   private scrollContainer(dom: Element): HTMLElement {
@@ -125,12 +124,12 @@
     globals.dispatch(Actions.setHoverCursorTimestamp({ts: -1n}));
   }
 
-  private renderRowsLabel() {
+  private renderTitle() {
     if (globals.ftracePanelData) {
       const {numEvents} = globals.ftracePanelData;
-      return m('.ftrace-rows-label', `Ftrace Events (${numEvents})`);
+      return `Ftrace Events (${numEvents})`;
     } else {
-      return m('.ftrace-rows-label', 'Ftrace Rows');
+      return 'Ftrace Rows';
     }
   }
 
@@ -152,7 +151,9 @@
     return m(
         MultiSelect,
         {
-          label: 'Filter by name',
+          label: 'Filter',
+          minimal: true,
+          compact: true,
           icon: 'filter_list_alt',
           popupPosition: PopupPosition.Top,
           options,
@@ -188,7 +189,7 @@
       for (let i = 0; i < events.length; i++) {
         const {ts, name, cpu, process, args} = events[i];
 
-        const timestamp = formatTPTime(ts - globals.state.traceTime.start);
+        const timestamp = m(Timestamp, {ts: asTPTimestamp(ts), minimal: true});
 
         const rank = i + offset;
 
diff --git a/ui/src/frontend/globals.ts b/ui/src/frontend/globals.ts
index 1518987..154ba28 100644
--- a/ui/src/frontend/globals.ts
+++ b/ui/src/frontend/globals.ts
@@ -16,7 +16,7 @@
 import {assertExists} from '../base/logging';
 import {Actions, DeferredAction} from '../common/actions';
 import {AggregateData} from '../common/aggregation_data';
-import {Args, ArgsTree} from '../common/arg_types';
+import {Args} from '../common/arg_types';
 import {
   ConversionJobName,
   ConversionJobStatus,
@@ -76,7 +76,6 @@
   packageName?: string;
   versionCode?: number;
   args?: Args;
-  argsTree?: ArgsTree;
   description?: Description;
 }
 
diff --git a/ui/src/frontend/logs_panel.ts b/ui/src/frontend/logs_panel.ts
index 8c50589..030763e 100644
--- a/ui/src/frontend/logs_panel.ts
+++ b/ui/src/frontend/logs_panel.ts
@@ -23,12 +23,14 @@
   LogEntries,
   LogEntriesKey,
 } from '../common/logs';
-import {formatTPTime, TPTime} from '../common/time';
+import {TPTime} from '../common/time';
 
 import {SELECTED_LOG_ROWS_COLOR} from './css_constants';
 import {globals} from './globals';
 import {LOG_PRIORITIES, LogsFilters} from './logs_filters';
 import {Panel} from './panel';
+import {asTPTimestamp} from './sql_types';
+import {Timestamp} from './widgets/timestamp';
 
 const ROW_H = 20;
 
@@ -154,7 +156,7 @@
                 'onmouseover': this.onRowOver.bind(this, ts),
                 'onmouseout': this.onRowOut.bind(this),
               },
-              m('.cell', formatTPTime(ts - globals.state.traceTime.start)),
+              m('.cell', m(Timestamp, {ts: asTPTimestamp(ts), minimal: true})),
               m('.cell', priorityLetter || '?'),
               m('.cell', tags[i]),
               hasProcessNames ? m('.cell.with-process', processNames[i]) :
diff --git a/ui/src/frontend/semantic_icons.ts b/ui/src/frontend/semantic_icons.ts
new file mode 100644
index 0000000..e30eb99
--- /dev/null
+++ b/ui/src/frontend/semantic_icons.ts
@@ -0,0 +1,21 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+export class Icons {
+  static readonly ExternalLink = 'open_in_new';     // Could be undefined
+  static readonly UpdateSelection = 'call_made';    // Could be 'open_in_new'
+  static readonly ChangeViewport = 'query_stats';   // Could be 'search'
+  static readonly ContextMenu = 'arrow_drop_down';  // Could be 'more_vert'
+  static readonly Copy = 'content_copy';
+}
diff --git a/ui/src/frontend/slice_details_panel.ts b/ui/src/frontend/slice_details_panel.ts
index 13e3dd5..eaab6ad 100644
--- a/ui/src/frontend/slice_details_panel.ts
+++ b/ui/src/frontend/slice_details_panel.ts
@@ -16,10 +16,19 @@
 
 import {Actions} from '../common/actions';
 import {translateState} from '../common/thread_state';
-import {tpTimeToCode} from '../common/time';
+import {formatTime, tpTimeToCode} from '../common/time';
+
+import {Anchor} from './anchor';
 import {globals, SliceDetails, ThreadDesc} from './globals';
 import {scrollToTrackAndTs} from './scroll_helper';
 import {SlicePanel} from './slice_panel';
+import {asTPTimestamp} from './sql_types';
+import {DetailsShell} from './widgets/details_shell';
+import {GridLayout} from './widgets/grid_layout';
+import {Section} from './widgets/section';
+import {SqlRef} from './widgets/sql_ref';
+import {Timestamp} from './widgets/timestamp';
+import {Tree, TreeNode} from './widgets/tree';
 
 export class SliceDetailsPanel extends SlicePanel {
   view() {
@@ -28,14 +37,25 @@
     const threadInfo = globals.threads.get(sliceInfo.utid);
 
     return m(
-        '.details-panel',
+        DetailsShell,
+        {
+          title: 'CPU Sched Slice',
+          description: this.renderDescription(sliceInfo),
+        },
         m(
-            '.details-panel-heading',
-            m('h2.split', `Slice Details`),
-            this.hasSchedLatencyInfo(sliceInfo) &&
-                m('h2.split', 'Scheduling Latency'),
+            GridLayout,
+            this.renderDetails(sliceInfo, threadInfo),
+            this.renderSchedLatencyInfo(sliceInfo),
             ),
-        this.renderDetails(sliceInfo, threadInfo));
+    );
+  }
+
+  private renderDescription(sliceInfo: SliceDetails) {
+    const threadInfo = globals.threads.get(sliceInfo.wakerUtid!);
+    if (!threadInfo) {
+      return null;
+    }
+    return `${threadInfo.procName} [${threadInfo.pid}]`;
   }
 
   private renderSchedLatencyInfo(sliceInfo: SliceDetails): m.Children {
@@ -43,12 +63,16 @@
       return null;
     }
     return m(
-        '.half-width-panel.slice-details-latency-panel',
-        m('img.slice-details-image', {
-          src: `${globals.root}assets/scheduling_latency.png`,
-        }),
-        this.renderWakeupText(sliceInfo),
-        this.renderDisplayLatencyText(sliceInfo),
+        Section,
+        {title: 'Scheduling Latency'},
+        m(
+            '.slice-details-latency-panel',
+            m('img.slice-details-image', {
+              src: `${globals.root}assets/scheduling_latency.png`,
+            }),
+            this.renderWakeupText(sliceInfo),
+            this.renderDisplayLatencyText(sliceInfo),
+            ),
     );
   }
 
@@ -60,8 +84,7 @@
     if (!threadInfo) {
       return null;
     }
-    const timestamp =
-        tpTimeToCode(sliceInfo.wakeupTs! - globals.state.traceTime.start);
+    const timestamp = formatTime(sliceInfo.wakeupTs!);
     return m(
         '.slice-details-wakeup-text',
         m('', `Wakeup @ ${timestamp} on CPU ${sliceInfo.wakerCpu} by`),
@@ -90,59 +113,81 @@
     return wakeupTs !== undefined && wakerUtid !== undefined;
   }
 
+  private renderThreadDuration(sliceInfo: SliceDetails) {
+    if (sliceInfo.threadDur !== undefined && sliceInfo.threadTs !== undefined) {
+      return m(TreeNode, {
+        icon: 'timer',
+        left: 'Thread Duration',
+        right: this.computeDuration(sliceInfo.threadTs, sliceInfo.threadDur),
+      });
+    } else {
+      return null;
+    }
+  }
+
   private renderDetails(sliceInfo: SliceDetails, threadInfo?: ThreadDesc):
       m.Children {
     if (!threadInfo || sliceInfo.ts === undefined ||
         sliceInfo.dur === undefined) {
       return null;
     } else {
-      const tableRows = [
-        m('tr',
-          m('th', `Process`),
-          m('td', `${threadInfo.procName} [${threadInfo.pid}]`)),
-        m('tr',
-          m('th', `Thread`),
-          m('td',
-            `${threadInfo.threadName} [${threadInfo.tid}]`,
-            m('i.material-icons.grey',
-              {onclick: () => this.goToThread(), title: 'Go to thread'},
-              'call_made'))),
-        m('tr', m('th', `Cmdline`), m('td', threadInfo.cmdline)),
-        m('tr',
-          m('th', `Start time`),
-          m('td',
-            `${tpTimeToCode(sliceInfo.ts - globals.state.traceTime.start)}`)),
-        m('tr',
-          m('th', `Duration`),
-          m('td', this.computeDuration(sliceInfo.ts, sliceInfo.dur))),
-        (sliceInfo.threadDur === undefined ||
-         sliceInfo.threadTs === undefined) ?
-            '' :
-            m('tr',
-              m('th', 'Thread duration'),
-              m('td',
-                this.computeDuration(sliceInfo.threadTs, sliceInfo.threadDur))),
-        m('tr', m('th', `Prio`), m('td', `${sliceInfo.priority}`)),
-        m('tr',
-          m('th', `End State`),
-          m('td', translateState(sliceInfo.endState))),
-        m('tr',
-          m('th', `Slice ID`),
-          m('td',
-            (sliceInfo.id !== undefined) ? sliceInfo.id.toString() :
-                                           'Unknown')),
-      ];
+      const extras: m.Children = [];
 
       for (const [key, value] of this.getProcessThreadDetails(sliceInfo)) {
         if (value !== undefined) {
-          tableRows.push(m('tr', m('th', key), m('td', value)));
+          extras.push(m(TreeNode, {left: key, right: value}));
         }
       }
 
+      const treeNodes = [
+        m(TreeNode, {
+          left: 'Process',
+          right: `${threadInfo.procName} [${threadInfo.pid}]`,
+        }),
+        m(TreeNode, {
+          left: 'Thread',
+          right:
+              m(Anchor,
+                {
+                  icon: 'call_made',
+                  onclick: () => {
+                    this.goToThread();
+                  },
+                },
+                `${threadInfo.threadName} [${threadInfo.tid}]`),
+        }),
+        m(TreeNode, {
+          left: 'Cmdline',
+          right: threadInfo.cmdline,
+        }),
+        m(TreeNode, {
+          left: 'Start time',
+          right: m(Timestamp, {ts: asTPTimestamp(sliceInfo.ts)}),
+        }),
+        m(TreeNode, {
+          left: 'Duration',
+          right: this.computeDuration(sliceInfo.ts, sliceInfo.dur),
+        }),
+        this.renderThreadDuration(sliceInfo),
+        m(TreeNode, {
+          left: 'Prio',
+          right: sliceInfo.priority,
+        }),
+        m(TreeNode, {
+          left: 'End State',
+          right: translateState(sliceInfo.endState),
+        }),
+        m(TreeNode, {
+          left: 'SQL ID',
+          right: m(SqlRef, {table: 'sched', id: sliceInfo.id}),
+        }),
+        ...extras,
+      ];
+
       return m(
-          '.details-table-multicolumn',
-          m('table.half-width-panel', tableRows),
-          this.renderSchedLatencyInfo(sliceInfo),
+          Section,
+          {title: 'Details'},
+          m(Tree, treeNodes),
       );
     }
   }
diff --git a/ui/src/frontend/sql/slice.ts b/ui/src/frontend/sql/slice.ts
new file mode 100644
index 0000000..4fdc41b
--- /dev/null
+++ b/ui/src/frontend/sql/slice.ts
@@ -0,0 +1,187 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import m from 'mithril';
+
+import {Actions} from '../../common/actions';
+import {EngineProxy} from '../../common/engine';
+import {LONG, NUM, STR} from '../../common/query_result';
+import {TPDuration} from '../../common/time';
+import {Anchor} from '../anchor';
+import {globals} from '../globals';
+import {focusHorizontalRange, verticalScrollToTrack} from '../scroll_helper';
+import {
+  asSliceSqlId,
+  asUpid,
+  asUtid,
+  SliceSqlId,
+  TPTimestamp,
+  Upid,
+  Utid,
+} from '../sql_types';
+import {asTPTimestamp} from '../sql_types';
+import {constraintsToQueryFragment, SQLConstraints} from '../sql_utils';
+import {
+  getProcessInfo,
+  getThreadInfo,
+  ProcessInfo,
+  ThreadInfo,
+} from '../thread_and_process_info';
+
+export interface SliceDetails {
+  id: SliceSqlId;
+  name: string;
+  ts: TPTimestamp;
+  dur: TPDuration;
+  sqlTrackId: number;
+  thread?: ThreadInfo;
+  process?: ProcessInfo;
+}
+
+async function getUtidAndUpid(engine: EngineProxy, sqlTrackId: number):
+    Promise<{utid?: Utid, upid?: Upid}> {
+  const columnInfo = (await engine.query(`
+    WITH
+       leafTrackTable AS (SELECT type FROM track WHERE id = ${sqlTrackId}),
+       cols AS (
+            SELECT name
+            FROM pragma_table_info((SELECT type FROM leafTrackTable))
+        )
+    SELECT
+       type as leafTrackTable,
+      'upid' in cols AS hasUpid,
+      'utid' in cols AS hasUtid
+    FROM leafTrackTable
+  `)).firstRow({hasUpid: NUM, hasUtid: NUM, leafTrackTable: STR});
+  const hasUpid = columnInfo.hasUpid !== 0;
+  const hasUtid = columnInfo.hasUtid !== 0;
+
+  const result: {utid?: Utid, upid?: Upid} = {};
+
+  if (hasUtid) {
+    const utid = (await engine.query(`
+        SELECT utid
+        FROM ${columnInfo.leafTrackTable}
+        WHERE id = ${sqlTrackId};
+    `)).firstRow({
+         utid: NUM,
+       }).utid;
+    result.utid = asUtid(utid);
+  } else if (hasUpid) {
+    const upid = (await engine.query(`
+        SELECT upid
+        FROM ${columnInfo.leafTrackTable}
+        WHERE id = ${sqlTrackId};
+    `)).firstRow({
+         upid: NUM,
+       }).upid;
+    result.upid = asUpid(upid);
+  }
+  return result;
+}
+
+async function getSliceFromConstraints(
+    engine: EngineProxy, constraints: SQLConstraints): Promise<SliceDetails[]> {
+  const query = await engine.query(`
+    SELECT
+      id,
+      name,
+      ts,
+      dur,
+      track_id as trackId
+    FROM slice
+    ${constraintsToQueryFragment(constraints)}`);
+  const it = query.iter({
+    id: NUM,
+    name: STR,
+    ts: LONG,
+    dur: LONG,
+    trackId: NUM,
+  });
+
+  const result: SliceDetails[] = [];
+  for (; it.valid(); it.next()) {
+    const {utid, upid} = await getUtidAndUpid(engine, it.trackId);
+
+    const thread: ThreadInfo|undefined =
+        utid === undefined ? undefined : await getThreadInfo(engine, utid);
+    const process: ProcessInfo|undefined = thread !== undefined ?
+        thread.process :
+        (upid === undefined ? undefined : await getProcessInfo(engine, upid));
+
+    result.push({
+      id: asSliceSqlId(it.id),
+      name: it.name,
+      ts: asTPTimestamp(it.ts),
+      dur: it.dur,
+      sqlTrackId: it.trackId,
+      thread,
+      process,
+    });
+  }
+  return result;
+}
+
+export async function getSlice(
+    engine: EngineProxy, id: SliceSqlId): Promise<SliceDetails|undefined> {
+  const result = await getSliceFromConstraints(engine, {
+    filters: [`id=${id}`],
+  });
+  if (result.length > 1) {
+    throw new Error(`slice table has more than one row with id ${id}`);
+  }
+  if (result.length === 0) {
+    return undefined;
+  }
+  return result[0];
+}
+
+interface SliceRefAttrs {
+  readonly id: SliceSqlId;
+  readonly name: string;
+  readonly ts: TPTimestamp;
+  readonly dur: TPDuration;
+  readonly sqlTrackId: number;
+}
+
+export class SliceRef implements m.ClassComponent<SliceRefAttrs> {
+  view(vnode: m.Vnode<SliceRefAttrs>) {
+    return m(
+        Anchor,
+        {
+          icon: 'open_in_new',
+          onclick: () => {
+            const uiTrackId =
+                globals.state.uiTrackIdByTraceTrackId[vnode.attrs.sqlTrackId];
+            if (uiTrackId === undefined) return;
+            verticalScrollToTrack(uiTrackId, true);
+            focusHorizontalRange(
+                vnode.attrs.ts, vnode.attrs.ts + vnode.attrs.dur);
+            globals.makeSelection(Actions.selectChromeSlice(
+                {id: vnode.attrs.id, trackId: uiTrackId, table: 'slice'}));
+          },
+        },
+        vnode.attrs.name);
+  }
+}
+
+export function sliceRef(slice: SliceDetails, name?: string): m.Child {
+  return m(SliceRef, {
+    id: slice.id,
+    name: name ?? slice.name,
+    ts: slice.ts,
+    dur: slice.dur,
+    sqlTrackId: slice.sqlTrackId,
+  });
+}
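
A hypothetical consumer of the new module, assuming an EngineProxy is already available and the caller sits in a sibling file under ui/src/frontend/sql/ (both assumptions, not shown in this diff):

```typescript
// Hypothetical usage sketch of the helpers defined above.
import m from 'mithril';

import {EngineProxy} from '../../common/engine';
import {asSliceSqlId} from '../sql_types';

import {getSlice, sliceRef} from './slice';

async function renderSliceAnchor(engine: EngineProxy): Promise<m.Child> {
  const details = await getSlice(engine, asSliceSqlId(123));
  if (details === undefined) {
    return 'slice 123 not found';
  }
  // sliceRef() returns an Anchor which, on click, scrolls to the track,
  // focuses the slice's time range and selects it.
  return sliceRef(details, `go to ${details.name}`);
}
```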
diff --git a/ui/src/frontend/sql_types.ts b/ui/src/frontend/sql_types.ts
index b7e2df2..fd810bc 100644
--- a/ui/src/frontend/sql_types.ts
+++ b/ui/src/frontend/sql_types.ts
@@ -63,12 +63,35 @@
   return v as (Utid | undefined);
 }
 
+// Id into |slice| SQL table.
+export type SliceSqlId = number&{
+  __type: 'SliceSqlId'
+}
+
+export function asSliceSqlId(v: number): SliceSqlId;
+export function asSliceSqlId(v?: number): SliceSqlId|undefined;
+export function asSliceSqlId(v?: number): SliceSqlId|undefined {
+  return v as (SliceSqlId | undefined);
+}
+
 // Id into |sched| SQL table.
 export type SchedSqlId = number&{
   __type: 'SchedSqlId'
 }
 
+export function asSchedSqlId(v: number): SchedSqlId;
+export function asSchedSqlId(v?: number): SchedSqlId|undefined;
+export function asSchedSqlId(v?: number): SchedSqlId|undefined {
+  return v as (SchedSqlId | undefined);
+}
+
 // Id into |thread_state| SQL table.
 export type ThreadStateSqlId = number&{
   __type: 'ThreadStateSqlId'
 }
+
+export function asThreadStateSqlId(v: number): ThreadStateSqlId;
+export function asThreadStateSqlId(v?: number): ThreadStateSqlId|undefined;
+export function asThreadStateSqlId(v?: number): ThreadStateSqlId|undefined {
+  return v as (ThreadStateSqlId | undefined);
+}
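
The asSchedSqlId and asThreadStateSqlId additions follow the same branded-number pattern as the existing id types above. A minimal, self-contained sketch of why the brand helps (names are local to the example):

```typescript
// Branded numeric ids: the phantom __type member makes otherwise-identical
// numbers non-interchangeable at compile time.
type SliceSqlId = number&{__type: 'SliceSqlId'};
type SchedSqlId = number&{__type: 'SchedSqlId'};

function asSliceSqlId(v: number): SliceSqlId {
  return v as SliceSqlId;
}

function selectSlice(id: SliceSqlId): void {
  console.log(`selecting slice ${id}`);
}

selectSlice(asSliceSqlId(42));     // OK
// selectSlice(42);                // Error: plain number lacks the brand
// selectSlice(7 as SchedSqlId);   // Error: wrong brand
```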
diff --git a/ui/src/frontend/sql_utils.ts b/ui/src/frontend/sql_utils.ts
index 422e70f..7a5aacc 100644
--- a/ui/src/frontend/sql_utils.ts
+++ b/ui/src/frontend/sql_utils.ts
@@ -21,24 +21,47 @@
 
 // Interface for defining constraints which can be passed to a SQL query.
 export interface SQLConstraints {
-  filters?: string[];
-  orderBy?: OrderClause[];
+  filters?: (undefined|string)[];
+  orderBy?: (undefined|string|OrderClause)[];
+  groupBy?: (undefined|string)[];
   limit?: number;
 }
 
+function isDefined<T>(t: T|undefined): t is T {
+  return t !== undefined;
+}
+
 // Formatting given constraints into a string which can be injected into
 // SQL query.
 export function constraintsToQueryFragment(c: SQLConstraints): string {
   const result: string[] = [];
-  if (c.filters && c.filters.length > 0) {
-    result.push(`WHERE ${c.filters.join(' and ')}`);
+
+  if (c.filters) {
+    const filters = c.filters.filter(isDefined);
+    if (filters.length > 0) {
+      result.push(`WHERE ${filters.join(' and ')}`);
+    }
   }
-  if (c.orderBy && c.orderBy.length > 0) {
-    const orderBys = c.orderBy.map((clause) => {
-      const direction = clause.direction ? ` ${clause.direction}` : '';
-      return `${clause.fieldName}${direction}`;
-    });
-    result.push(`ORDER BY ${orderBys.join(', ')}`);
+  if (c.groupBy) {
+    const groupBy = c.groupBy.filter(isDefined);
+    if (groupBy.length > 0) {
+      const groups = groupBy.join(', ');
+      result.push(`GROUP BY ${groups}`);
+    }
+  }
+  if (c.orderBy) {
+    const orderBy = c.orderBy.filter(isDefined);
+    if (orderBy.length > 0) {
+      const orderBys = orderBy.map((clause) => {
+        if (typeof clause === 'string') {
+          return clause;
+        } else {
+          const direction = clause.direction ? ` ${clause.direction}` : '';
+          return `${clause.fieldName}${direction}`;
+        }
+      });
+      result.push(`ORDER BY ${orderBys.join(', ')}`);
+    }
   }
   if (c.limit) {
     result.push(`LIMIT ${c.limit}`);
diff --git a/ui/src/frontend/sql_utils_unittest.ts b/ui/src/frontend/sql_utils_unittest.ts
index 0e5dc76..bbe3385 100644
--- a/ui/src/frontend/sql_utils_unittest.ts
+++ b/ui/src/frontend/sql_utils_unittest.ts
@@ -27,18 +27,38 @@
 
 test('constraintsToQueryFragment: order by', () => {
   expect(normalize(constraintsToQueryFragment({
-    orderBy: [{fieldName: 'name'}, {fieldName: 'count', direction: 'DESC'}],
-  }))).toEqual('ORDER BY name, count DESC');
+    orderBy: [
+      {fieldName: 'name'},
+      {fieldName: 'count', direction: 'DESC'},
+      undefined,
+      'value',
+    ],
+  }))).toEqual('ORDER BY name, count DESC, value');
 });
 
 test('constraintsToQueryFragment: limit', () => {
   expect(normalize(constraintsToQueryFragment({limit: 3}))).toEqual('LIMIT 3');
 });
 
+test('constraintsToQueryFragment: group by', () => {
+  expect(normalize(constraintsToQueryFragment({
+    groupBy: ['foo', undefined, 'bar'],
+  }))).toEqual('GROUP BY foo, bar');
+});
+
 test('constraintsToQueryFragment: all', () => {
   expect(normalize(constraintsToQueryFragment({
     filters: ['id != 1'],
+    groupBy: ['track_id'],
     orderBy: [{fieldName: 'ts'}],
     limit: 1,
-  }))).toEqual('WHERE id != 1 ORDER BY ts LIMIT 1');
+  }))).toEqual('WHERE id != 1 GROUP BY track_id ORDER BY ts LIMIT 1');
+});
+
+test('constraintsToQueryFragment: all undefined', () => {
+  expect(normalize(constraintsToQueryFragment({
+    filters: [undefined],
+    orderBy: [undefined, undefined],
+    groupBy: [undefined, undefined],
+  }))).toEqual('');
 });
diff --git a/ui/src/frontend/thread_and_process_info.ts b/ui/src/frontend/thread_and_process_info.ts
index d524e02..46ab39e 100644
--- a/ui/src/frontend/thread_and_process_info.ts
+++ b/ui/src/frontend/thread_and_process_info.ts
@@ -34,7 +34,7 @@
   versionCode?: number;
 }
 
-async function getProcessInfo(
+export async function getProcessInfo(
     engine: EngineProxy, upid: Upid): Promise<ProcessInfo> {
   const it = (await engine.query(`
               SELECT pid, name, uid FROM process WHERE upid = ${upid};
diff --git a/ui/src/frontend/thread_state.ts b/ui/src/frontend/thread_state.ts
index 0bc4082..a4a71a1 100644
--- a/ui/src/frontend/thread_state.ts
+++ b/ui/src/frontend/thread_state.ts
@@ -12,6 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import m from 'mithril';
+
 import {Actions} from '../common/actions';
 import {EngineProxy} from '../common/engine';
 import {LONG, NUM, NUM_NULL, STR_NULL} from '../common/query_result';
@@ -19,17 +21,16 @@
 import {
   TPDuration,
   TPTime,
-  tpTimeToCode,
 } from '../common/time';
+import {Anchor} from './anchor';
 
-import {copyToClipboard} from './clipboard';
 import {globals} from './globals';
-import {menuItem} from './popup_menu';
 import {scrollToTrackAndTs} from './scroll_helper';
 import {
   asUtid,
   SchedSqlId,
   ThreadStateSqlId,
+  Utid,
 } from './sql_types';
 import {
   constraintsToQueryFragment,
@@ -37,12 +38,9 @@
   SQLConstraints,
 } from './sql_utils';
 import {
-  getProcessName,
   getThreadInfo,
-  getThreadName,
   ThreadInfo,
 } from './thread_and_process_info';
-import {dict, Dict, maybeValue, Value, value} from './value';
 
 // Representation of a single thread state object, corresponding to
 // a row for the |thread_slice| table.
@@ -51,7 +49,7 @@
   threadStateSqlId: ThreadStateSqlId;
   // Id of the corresponding entry in the |sched| table.
   schedSqlId?: SchedSqlId;
-  // Timestamp of the the beginning of this thread state in nanoseconds.
+  // Timestamp of the beginning of this thread state in nanoseconds.
   ts: TPTime;
   // Duration of this thread state in nanoseconds.
   dur: TPDuration;
@@ -153,55 +151,52 @@
   scrollToTrackAndTs(trackId, ts);
 }
 
-function stateToValue(
-    state: string, cpu: number|undefined, id: SchedSqlId|undefined, ts: TPTime):
-    Value|null {
-  if (!state) {
-    return null;
-  }
-  if (id === undefined || cpu === undefined) {
-    return value(state);
-  }
-  return value(`${state} on CPU ${cpu}`, {
-    rightButton: {
-      action: () => {
-        goToSchedSlice(cpu, id, ts);
-      },
-      hoverText: 'Go to CPU slice',
-    },
-  });
+interface ThreadStateRefAttrs {
+  id: ThreadStateSqlId;
+  ts: TPTime;
+  dur: TPDuration;
+  utid: Utid;
+  // If not present, a placeholder name will be used.
+  name?: string;
 }
 
-export function threadStateToDict(state: ThreadState): Dict {
-  const result: {[name: string]: Value|null} = {};
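+// Renders a link to a thread state object: clicking it selects the thread
+// state on the matching thread state track and scrolls to it.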
+export class ThreadStateRef implements m.ClassComponent<ThreadStateRefAttrs> {
+  view(vnode: m.Vnode<ThreadStateRefAttrs>) {
+    return m(
+        Anchor,
+        {
+          icon: 'open_in_new',
+          onclick: () => {
+            let trackId: string|number|undefined;
+            for (const track of Object.values(globals.state.tracks)) {
+              if (track.kind === 'ThreadStateTrack' &&
+                  (track.config as {utid: number}).utid === vnode.attrs.utid) {
+                trackId = track.id;
+              }
+            }
 
-  result['Start time'] =
-      value(tpTimeToCode(state.ts - globals.state.traceTime.start));
-  result['Duration'] = value(tpTimeToCode(state.dur));
-  result['State'] =
-      stateToValue(state.state, state.cpu, state.schedSqlId, state.ts);
-  result['Blocked function'] = maybeValue(state.blockedFunction);
-  const process = state?.thread?.process;
-  result['Process'] = maybeValue(process ? getProcessName(process) : undefined);
-  const thread = state?.thread;
-  result['Thread'] = maybeValue(thread ? getThreadName(thread) : undefined);
-  if (state.wakerThread) {
-    const process = state.wakerThread.process;
-    result['Waker'] = dict({
-      'Process': maybeValue(process ? getProcessName(process) : undefined),
-      'Thread': maybeValue(getThreadName(state.wakerThread)),
-    });
+            if (trackId) {
+              globals.makeSelection(Actions.selectThreadState({
+                id: vnode.attrs.id,
+                trackId: trackId.toString(),
+              }));
+
+              scrollToTrackAndTs(trackId, vnode.attrs.ts, true);
+            }
+          },
+        },
+        vnode.attrs.name ?? `Thread State ${vnode.attrs.id}`,
+    );
   }
-  result['SQL id'] = value(`thread_state[${state.threadStateSqlId}]`, {
-    contextMenu: [
-      menuItem(
-          'Copy SQL query',
-          () => {
-            copyToClipboard(`select * from thread_state where id=${
-                state.threadStateSqlId}`);
-          }),
-    ],
-  });
+}
 
-  return dict(result);
+export function threadStateRef(state: ThreadState): m.Child {
+  if (state.thread === undefined) return null;
+
+  return m(ThreadStateRef, {
+    id: state.threadStateSqlId,
+    ts: state.ts,
+    dur: state.dur,
+    utid: state.thread.utid,
+  });
 }
diff --git a/ui/src/frontend/thread_state_tab.ts b/ui/src/frontend/thread_state_tab.ts
index f1b8662..b086104 100644
--- a/ui/src/frontend/thread_state_tab.ts
+++ b/ui/src/frontend/thread_state_tab.ts
@@ -14,11 +14,24 @@
 
 import m from 'mithril';
 
+import {TPTime, tpTimeToCode} from '../common/time';
+
+import {Anchor} from './anchor';
 import {BottomTab, bottomTabRegistry, NewBottomTabArgs} from './bottom_tab';
 import {globals} from './globals';
-import {ThreadStateSqlId} from './sql_types';
-import {getThreadState, ThreadState, threadStateToDict} from './thread_state';
-import {renderDict} from './value';
+import {asTPTimestamp, SchedSqlId, ThreadStateSqlId} from './sql_types';
+import {
+  getProcessName,
+  getThreadName,
+  ThreadInfo,
+} from './thread_and_process_info';
+import {getThreadState, goToSchedSlice, ThreadState} from './thread_state';
+import {DetailsShell} from './widgets/details_shell';
+import {GridLayout} from './widgets/grid_layout';
+import {Section} from './widgets/section';
+import {SqlRef} from './widgets/sql_ref';
+import {Timestamp} from './widgets/timestamp';
+import {Tree, TreeNode} from './widgets/tree';
 
 interface ThreadStateTabConfig {
   // Id into |thread_state| sql table.
@@ -50,23 +63,94 @@
     return 'Current Selection';
   }
 
-  renderTabContents(): m.Child {
-    if (!this.loaded) {
-      return m('h2', 'Loading');
-    }
-    if (!this.state) {
-      return m('h2', `Thread state ${this.config.id} does not exist`);
-    }
-    return renderDict(threadStateToDict(this.state));
+  viewTab() {
+    // TODO(altimin/stevegolton): Differentiate between "Current Selection" and
+    // "Pinned" views in DetailsShell.
+    return m(
+        DetailsShell,
+        {title: 'Thread State', description: this.renderLoadingText()},
+        m(GridLayout,
+          m(
+              Section,
+              {title: 'Details'},
+              this.state && this.renderTree(this.state),
+              )),
+    );
   }
 
-  viewTab() {
-    // TODO(altimin): Create a reusable component for showing the header and
-    // differentiate between "Current Selection" and "Pinned" views.
+  private renderLoadingText() {
+    if (!this.loaded) {
+      return 'Loading';
+    }
+    if (!this.state) {
+      return `Thread state ${this.config.id} does not exist`;
+    }
+    // TODO(stevegolton): Return something intelligent here.
+    return this.config.id;
+  }
+
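+  // Renders the thread state details as a tree of name/value nodes.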
+  private renderTree(state: ThreadState) {
+    const thread = state.thread;
+    const process = state.thread?.process;
     return m(
-        'div.details-panel',
-        m('header.overview', m('span', 'Thread State')),
-        this.renderTabContents());
+        Tree,
+        m(TreeNode, {
+          left: 'Start time',
+          right: m(Timestamp, {ts: asTPTimestamp(state.ts)}),
+        }),
+        m(TreeNode, {
+          left: 'Duration',
+          right: tpTimeToCode(state.dur),
+        }),
+        m(TreeNode, {
+          left: 'State',
+          right: this.renderState(
+              state.state, state.cpu, state.schedSqlId, state.ts),
+        }),
+        state.blockedFunction && m(TreeNode, {
+          left: 'Blocked function',
+          right: state.blockedFunction,
+        }),
+        process && m(TreeNode, {
+          left: 'Process',
+          right: getProcessName(process),
+        }),
+        thread && m(TreeNode, {left: 'Thread', right: getThreadName(thread)}),
+        state.wakerThread && this.renderWakerThread(state.wakerThread),
+        m(TreeNode, {
+          left: 'SQL ID',
+          right: m(SqlRef, {table: 'thread_state', id: state.threadStateSqlId}),
+        }),
+    );
+  }
+
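+  // Renders the state name, linking to the corresponding CPU slice when sched
+  // information is available.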
+  private renderState(
+      state: string, cpu: number|undefined, id: SchedSqlId|undefined,
+      ts: TPTime): m.Children {
+    if (!state) {
+      return null;
+    }
+    if (id === undefined || cpu === undefined) {
+      return state;
+    }
+    return m(
+        Anchor,
+        {
+          title: 'Go to CPU slice',
+          icon: 'call_made',
+          onclick: () => goToSchedSlice(cpu, id, ts),
+        },
+        `${state} on CPU ${cpu}`);
+  }
+
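+  // Renders the waker thread as a nested node with its process and thread
+  // names.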
+  private renderWakerThread(wakerThread: ThreadInfo) {
+    return m(
+        TreeNode,
+        {left: 'Waker'},
+        m(TreeNode,
+          {left: 'Process', right: getProcessName(wakerThread.process)}),
+        m(TreeNode, {left: 'Thread', right: getThreadName(wakerThread)}),
+    );
   }
 
   isLoading() {
diff --git a/ui/src/frontend/tickmark_panel.ts b/ui/src/frontend/tickmark_panel.ts
index 6076188..3266017 100644
--- a/ui/src/frontend/tickmark_panel.ts
+++ b/ui/src/frontend/tickmark_panel.ts
@@ -75,7 +75,7 @@
           size.height);
     }
     const index = globals.state.searchIndex;
-    if (index !== -1 && index <= globals.currentSearchResults.tsStarts.length) {
+    if (index !== -1 && index < globals.currentSearchResults.tsStarts.length) {
       const start = globals.currentSearchResults.tsStarts[index];
       const triangleStart =
           Math.max(visibleTimeScale.tpTimeToPx(start), 0) + TRACK_SHELL_WIDTH;
diff --git a/ui/src/frontend/track.ts b/ui/src/frontend/track.ts
index 9b59059..a2aa8dd 100644
--- a/ui/src/frontend/track.ts
+++ b/ui/src/frontend/track.ts
@@ -15,7 +15,7 @@
 import m from 'mithril';
 
 import {assertExists} from '../base/logging';
-import {Engine} from '../common/engine';
+import {EngineProxy} from '../common/engine';
 import {TrackState} from '../common/state';
 import {TPTime} from '../common/time';
 import {TrackData} from '../common/track_data';
@@ -27,7 +27,7 @@
 // Args passed to the track constructors when creating a new track.
 export interface NewTrackArgs {
   trackId: string;
-  engine: Engine;
+  engine: EngineProxy;
 }
 
 // This interface forces track implementations to have some static properties.
@@ -55,7 +55,7 @@
 export abstract class Track<Config = {}, Data extends TrackData = TrackData> {
   // The UI-generated track ID (not to be confused with the SQL track.id).
   protected readonly trackId: string;
-  protected readonly engine: Engine;
+  protected readonly engine: EngineProxy;
 
   // When true this is a new controller-less track type.
   // TODO(hjd): eventually all tracks will be controller-less and this
diff --git a/ui/src/frontend/track_group_panel.ts b/ui/src/frontend/track_group_panel.ts
index dbab7f7..05fe520 100644
--- a/ui/src/frontend/track_group_panel.ts
+++ b/ui/src/frontend/track_group_panel.ts
@@ -58,8 +58,11 @@
     const engineId = this.summaryTrackState.engineId;
     const engine = globals.engines.get(engineId);
     if (engine !== undefined) {
-      this.summaryTrack =
-          trackCreator.create({trackId: this.summaryTrackState.id, engine});
+      this.summaryTrack = trackCreator.create({
+        trackId: this.summaryTrackState.id,
+        engine: engine.getProxy(`Track; kind: ${
+            this.summaryTrackState.kind}; id: ${this.summaryTrackState.id}`),
+      });
     }
   }
 
diff --git a/ui/src/frontend/track_panel.ts b/ui/src/frontend/track_panel.ts
index b03d18a..4c7b2a5 100644
--- a/ui/src/frontend/track_panel.ts
+++ b/ui/src/frontend/track_panel.ts
@@ -332,7 +332,11 @@
       return;
     }
     const trackCreator = trackRegistry.get(trackState.kind);
-    this.track = trackCreator.create({trackId, engine});
+    this.track = trackCreator.create({
+      trackId,
+      engine:
+          engine.getProxy(`Track; kind: ${trackState.kind}; id: ${trackId}`),
+    });
     this.trackState = trackState;
   }
 
diff --git a/ui/src/frontend/value.ts b/ui/src/frontend/value.ts
index 9b866b0..bf1f769 100644
--- a/ui/src/frontend/value.ts
+++ b/ui/src/frontend/value.ts
@@ -15,6 +15,7 @@
 import m from 'mithril';
 
 import {PopupMenuButton, PopupMenuItem} from './popup_menu';
+import {Tree, TreeNode} from './widgets/tree';
 
 // This file implements a component for rendering JSON-like values (with
 // customisation options like context menu and action buttons).
@@ -128,62 +129,57 @@
 
 // Recursively render the given value and its children, returning a list of
 // vnodes corresponding to the nodes of the table.
-function*
-    renderValue(name: string, value: Value, depth: number): Generator<m.Child> {
-  const row = [
-    m('th',
-      {
-        style: `padding-left: ${15 * depth}px`,
-      },
-      name,
-      value.contextMenu ? m(PopupMenuButton, {
-        icon: 'arrow_drop_down',
-        items: value.contextMenu,
-      }) :
-                          null),
+function renderValue(name: string, value: Value): m.Children {
+  const left = [
+    name,
+    value.contextMenu ? m(PopupMenuButton, {
+      icon: 'arrow_drop_down',
+      items: value.contextMenu,
+    }) :
+                        null,
   ];
   if (isArray(value)) {
-    yield m('tr', row);
-    for (let i = 0; i < value.items.length; ++i) {
-      yield* renderValue(`[${i}]`, value.items[i], depth + 1);
-    }
-    return;
+    const nodes = value.items.map((value: Value, index: number) => {
+      return renderValue(`[${index}]`, value);
+    });
+    return m(TreeNode, {left, right: `array[${nodes.length}]`}, nodes);
   } else if (isDict(value)) {
-    yield m('tr', row);
+    const nodes: m.Children[] = [];
     for (const key of Object.keys(value.items)) {
-      yield* renderValue(key, value.items[key], depth + 1);
+      nodes.push(renderValue(key, value.items[key]));
     }
-    return;
-  }
-  const renderButton = (button?: ButtonParams) => {
-    if (!button) {
+    return m(TreeNode, {left, right: 'dict'}, nodes);
+  } else {
+    const renderButton = (button?: ButtonParams) => {
+      if (!button) {
+        return null;
+      }
+      return m(
+          'i.material-icons.grey',
+          {
+            onclick: button.action,
+            title: button.hoverText,
+          },
+          button.icon ? button.icon : 'call_made');
+    };
+    if (value.kind === 'STRING') {
+      const right = [
+        renderButton(value.leftButton),
+        m('span', value.value),
+        renderButton(value.rightButton),
+      ];
+      return m(TreeNode, {left, right});
+    } else {
       return null;
     }
-    return m(
-        'i.material-icons.grey',
-        {
-          onclick: button.action,
-          title: button.hoverText,
-        },
-        button.icon ? button.icon : 'call_made');
-  };
-  if (value.kind === 'STRING') {
-    row.push(
-        m('td',
-          renderButton(value.leftButton),
-          m('span', value.value),
-          renderButton(value.rightButton)));
   }
-  yield m('tr', row);
 }
 
-// Render a given dictionary into a vnode.
+// Render a given dictionary to a tree.
 export function renderDict(dict: Dict): m.Child {
-  const rows: m.Child[] = [];
+  const rows: m.Children[] = [];
   for (const key of Object.keys(dict.items)) {
-    for (const vnode of renderValue(key, dict.items[key], 0)) {
-      rows.push(vnode);
-    }
+    rows.push(renderValue(key, dict.items[key]));
   }
-  return m('table.auto-layout', rows);
+  return m(Tree, rows);
 }
diff --git a/ui/src/frontend/widgets/details_shell.ts b/ui/src/frontend/widgets/details_shell.ts
new file mode 100644
index 0000000..7fe684b
--- /dev/null
+++ b/ui/src/frontend/widgets/details_shell.ts
@@ -0,0 +1,54 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import m from 'mithril';
+import {classNames} from '../classnames';
+
+interface DetailsShellAttrs {
+  title: m.Children;
+  description?: m.Children;
+  buttons?: m.Children;
+  // If true, this container will fill the parent, and content scrolling is
+  // expected to be handled internally.
+  // Defaults to false.
+  matchParent?: boolean;
+}
+
+// A shell for details panels, giving them a consistent look.
+// It provides a standard header bar with a title, description and buttons.
+export class DetailsShell implements m.ClassComponent<DetailsShellAttrs> {
+  view({attrs, children}: m.Vnode<DetailsShellAttrs>) {
+    const {
+      title,
+      description,
+      buttons,
+      matchParent,
+    } = attrs;
+
+    return m(
+        'section.pf-details-shell',
+        {class: classNames(matchParent && 'pf-match-parent')},
+        m(
+            'header.pf-header-bar',
+            m('h1.pf-header-title', title),
+            m('span.pf-header-description', description),
+            m('nav.pf-header-buttons', buttons),
+            ),
+        m(
+            'article.pf-content',
+            children,
+            ),
+    );
+  }
+}
diff --git a/ui/src/frontend/widgets/grid_layout.ts b/ui/src/frontend/widgets/grid_layout.ts
new file mode 100644
index 0000000..a90eb4a
--- /dev/null
+++ b/ui/src/frontend/widgets/grid_layout.ts
@@ -0,0 +1,28 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import m from 'mithril';
+
+// Thin wrappers around CSS layout classes: GridLayout renders a
+// .pf-grid-layout container and Column renders a single .pf-column within it.
+export class GridLayout implements m.ClassComponent {
+  view({children}: m.Vnode) {
+    return m('div.pf-grid-layout', children);
+  }
+}
+
+export class Column implements m.ClassComponent {
+  view({children}: m.Vnode) {
+    return m('div.pf-column', children);
+  }
+}
diff --git a/ui/src/frontend/widgets/icon.ts b/ui/src/frontend/widgets/icon.ts
index 91003ad..cfe990a 100644
--- a/ui/src/frontend/widgets/icon.ts
+++ b/ui/src/frontend/widgets/icon.ts
@@ -23,14 +23,16 @@
   filled?: boolean;
   // List of space separated class names forwarded to the icon.
   className?: string;
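+  // Remaining attributes forwarded to the underlying element.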
+  [htmlAttrs: string]: any;
 }
 
 export class Icon implements m.ClassComponent<IconAttrs> {
-  view(vnode: m.Vnode<IconAttrs>): m.Child {
-    const classes = classNames(vnode.attrs.className);
+  view({attrs}: m.Vnode<IconAttrs>): m.Child {
+    const {icon, filled, className, ...htmlAttrs} = attrs;
+    const classes = classNames(className);
     return m(
-        vnode.attrs.filled ? 'i.material-icons-filled' : 'i.material-icons',
-        {class: classes},
-        vnode.attrs.icon);
+        filled ? 'i.material-icons-filled' : 'i.material-icons',
+        {class: classes, ...htmlAttrs},
+        icon);
   }
 }
diff --git a/ui/src/frontend/widgets/menu.ts b/ui/src/frontend/widgets/menu.ts
index b521dab..a0f8b97 100644
--- a/ui/src/frontend/widgets/menu.ts
+++ b/ui/src/frontend/widgets/menu.ts
@@ -55,7 +55,7 @@
   }
 
   private renderNested({attrs, children}: m.CVnode<MenuItemAttrs>) {
-    const {rightIcon = 'chevron_right', closePopupOnClick = false, ...rest} =
+    const {rightIcon = 'arrow_right', closePopupOnClick = false, ...rest} =
         attrs;
 
     return m(
@@ -63,7 +63,7 @@
         {
           popupPosition: PopupPosition.RightStart,
           trigger: m(MenuItem, {
-            rightIcon: rightIcon ?? 'chevron_right',
+            rightIcon: rightIcon ?? 'arrow_right',
             closePopupOnClick,
             ...rest,
           }),
diff --git a/ui/src/frontend/widgets/multiselect.ts b/ui/src/frontend/widgets/multiselect.ts
index 77debb2..52e9b39 100644
--- a/ui/src/frontend/widgets/multiselect.ts
+++ b/ui/src/frontend/widgets/multiselect.ts
@@ -37,6 +37,8 @@
 
 export interface MultiSelectAttrs {
   icon?: string;
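+  // Styling forwarded to the trigger button (see Button widget).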
+  minimal?: boolean;
+  compact?: boolean;
   label: string;
   options: Option[];
   onChange?: (diffs: MultiSelectDiff[]) => void;
@@ -58,12 +60,15 @@
     const {
       icon,
       popupPosition = PopupPosition.Auto,
+      minimal,
+      compact,
     } = attrs;
 
     return m(
         Popup,
         {
-          trigger: m(Button, {label: this.labelText(attrs), icon}),
+          trigger:
+              m(Button, {label: this.labelText(attrs), icon, minimal, compact}),
           position: popupPosition,
         },
         this.renderPopup(attrs),
diff --git a/ui/src/frontend/widgets/section.ts b/ui/src/frontend/widgets/section.ts
new file mode 100644
index 0000000..be88376
--- /dev/null
+++ b/ui/src/frontend/widgets/section.ts
@@ -0,0 +1,36 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import m from 'mithril';
+
+export interface SectionAttrs {
+  // The name of the section, displayed in the title bar
+  title: string;
+  // Remaining attributes forwarded to the underlying HTML <section>.
+  [htmlAttrs: string]: any;
+}
+
+export class Section implements m.ClassComponent<SectionAttrs> {
+  view({attrs, children}: m.CVnode<SectionAttrs>) {
+    const {title} = attrs;
+    return m(
+        'section.pf-section',
+        m(
+            'header',
+            m('h1', title),
+            ),
+        m('article', children),
+    );
+  }
+}
diff --git a/ui/src/frontend/widgets/sql_ref.ts b/ui/src/frontend/widgets/sql_ref.ts
new file mode 100644
index 0000000..1882fc5
--- /dev/null
+++ b/ui/src/frontend/widgets/sql_ref.ts
@@ -0,0 +1,58 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import m from 'mithril';
+
+import {Anchor} from '../anchor';
+import {copyToClipboard} from '../clipboard';
+import {Icons} from '../semantic_icons';
+
+import {MenuItem, PopupMenu2} from './menu';
+
+// This widget provides common styling and popup menu options for a SQL row,
+// given a table name and an ID.
+export interface SqlRefAttrs {
+  // The name of the table our row lives in.
+  table: string;
+  // The ID of our row.
+  // If not provided, `table[Unknown]` is shown with no popup menu.
+  id?: number;
+}
+
+export class SqlRef implements m.ClassComponent<SqlRefAttrs> {
+  view({attrs}: m.CVnode<SqlRefAttrs>) {
+    const {table, id} = attrs;
+    if (id !== undefined) {
+      return m(
+          PopupMenu2,
+          {
+            trigger: m(Anchor, {icon: Icons.ContextMenu}, `${table}[${id}]`),
+          },
+          m(MenuItem, {
+            label: 'Copy ID',
+            icon: 'content_copy',
+            onclick: () => copyToClipboard(`${id}`),
+          }),
+          m(MenuItem, {
+            label: 'Copy SQL query',
+            icon: 'file_copy',
+            onclick: () =>
+                copyToClipboard(`select * from ${table} where id=${id}`),
+          }),
+      );
+    } else {
+      return `${table}[Unknown]`;
+    }
+  }
+}
diff --git a/ui/src/frontend/widgets/timestamp.ts b/ui/src/frontend/widgets/timestamp.ts
index d8cc841..374f8e3 100644
--- a/ui/src/frontend/widgets/timestamp.ts
+++ b/ui/src/frontend/widgets/timestamp.ts
@@ -14,15 +14,37 @@
 
 import m from 'mithril';
 
-import {tpTimeToCode} from '../../common/time';
-import {toTraceTime, TPTimestamp} from '../sql_types';
+import {formatTime} from '../../common/time';
+import {Anchor} from '../anchor';
+import {copyToClipboard} from '../clipboard';
+import {Icons} from '../semantic_icons';
+import {TPTimestamp} from '../sql_types';
+
+import {MenuItem, PopupMenu2} from './menu';
 
 interface TimestampAttrs {
+  // The timestamp to print. This should be the absolute, raw timestamp as
+  // found in trace processor.
   ts: TPTimestamp;
+  minimal?: boolean;
 }
 
 export class Timestamp implements m.ClassComponent<TimestampAttrs> {
-  view(vnode: m.Vnode<TimestampAttrs>) {
-    return tpTimeToCode(toTraceTime(vnode.attrs.ts));
+  view({attrs}: m.Vnode<TimestampAttrs>) {
+    const {ts, minimal = false} = attrs;
+    return m(
+        PopupMenu2,
+        {
+          trigger:
+              m(Anchor, {icon: Icons.ContextMenu}, formatTime(ts, minimal)),
+        },
+        m(MenuItem, {
+          icon: Icons.Copy,
+          label: 'Copy raw timestamp',
+          onclick: () => {
+            copyToClipboard(ts.toString());
+          },
+        }),
+    );
   }
 }
diff --git a/ui/src/frontend/widgets/tree.ts b/ui/src/frontend/widgets/tree.ts
index e797343..141fda6 100644
--- a/ui/src/frontend/widgets/tree.ts
+++ b/ui/src/frontend/widgets/tree.ts
@@ -3,32 +3,16 @@
 import {classNames} from '../classnames';
 import {globals} from '../globals';
 
-import {Button} from './button';
-import {Spinner} from './spinner';
 import {hasChildren} from './utils';
 
-export enum TreeLayout {
-  // Classic heirachical tree layout with no columnar alignment.
-  // Example:
-  // foo: bar
-  //  ├ baz: qux
-  //  └ quux: corge
-  // grault: garply
-  Tree = 'tree',
-
-  // Heirachical tree layout but right values are horizontally aligned.
-  // Example:
-  // foo     bar
-  //  ├ baz  qux
-  //  └ quux corge
-  // grault  garply
-  Grid = 'grid',
-}
+// Hierarchical tree layout in which the right-hand values are horizontally
+// aligned.
+// Example:
+// foo     bar
+//  ├ baz  qux
+//  └ quux corge
+// grault  garply
 
 interface TreeAttrs {
-  // The style of layout.
-  // Defaults to grid.
-  layout?: TreeLayout;
   // Space delimited class list applied to our tree element.
   className?: string;
 }
@@ -36,17 +20,14 @@
 export class Tree implements m.ClassComponent<TreeAttrs> {
   view({attrs, children}: m.Vnode<TreeAttrs>): m.Children {
     const {
-      layout: style = TreeLayout.Grid,
       className = '',
     } = attrs;
 
-    if (style === TreeLayout.Grid) {
-      return m('.pf-ptree-grid', {class: className}, children);
-    } else if (style === TreeLayout.Tree) {
-      return m('.pf-ptree', {class: className}, children);
-    } else {
-      return null;
-    }
+    const classes = classNames(
+        className,
+    );
+
+    return m('.pf-tree', {class: classes}, children);
   }
 }
 
@@ -64,6 +45,11 @@
   // Whether this node is collapsed or not.
   // If omitted, collapsed state 'uncontrolled' - i.e. controlled internally.
   collapsed?: boolean;
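+  // When true, the node is rendered in its loading state.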
+  loading?: boolean;
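+  // When true, show the expand/collapse caret even if the node has no
+  // children.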
+  showCaret?: boolean;
+  // Optional icon to show to the left of the text.
+  // If this node contains children, this icon is ignored.
+  icon?: string;
   // Called when the collapsed state is changed, mainly used in controlled mode.
   onCollapseChanged?: (collapsed: boolean, attrs: TreeNodeAttrs) => void;
 }
@@ -71,26 +57,49 @@
 export class TreeNode implements m.ClassComponent<TreeNodeAttrs> {
   private collapsed = false;
   view(vnode: m.CVnode<TreeNodeAttrs>): m.Children {
-    return [
-      m(
-          '.pf-tree-node',
-          this.renderLeft(vnode),
-          this.renderRight(vnode),
-          ),
-      hasChildren(vnode) && this.renderChildren(vnode),
-    ];
+    const {children, attrs, attrs: {left, onCollapseChanged = () => {}}} =
+        vnode;
+    return m(
+        '.pf-tree-node',
+        {
+          class: classNames(this.getClassNameForNode(vnode)),
+        },
+        m('span.pf-tree-gutter', {
+          onclick: () => {
+            this.collapsed = !this.isCollapsed(vnode);
+            onCollapseChanged(this.collapsed, attrs);
+            globals.rafScheduler.scheduleFullRedraw();
+          },
+        }),
+        m(
+            '.pf-tree-content',
+            m('.pf-tree-left', left),
+            this.renderRight(vnode),
+            ),
+        hasChildren(vnode) &&
+            [
+              m('span.pf-tree-indent-gutter'),
+              m('.pf-tree-children', children),
+            ],
+    );
   }
 
-  private renderLeft(vnode: m.CVnode<TreeNodeAttrs>) {
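+  // Returns the class reflecting the node's state: loading, collapsed,
+  // expanded, or undefined when it has neither children nor a caret.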
+  private getClassNameForNode(vnode: m.CVnode<TreeNodeAttrs>) {
     const {
-      attrs: {left},
-    } = vnode;
-
-    return m(
-        '.pf-tree-left',
-        left,
-        hasChildren(vnode) && this.renderCollapseButton(vnode),
-    );
+      loading = false,
+      showCaret = false,
+    } = vnode.attrs;
+    if (loading) {
+      return 'pf-loading';
+    } else if (hasChildren(vnode) || showCaret) {
+      if (this.isCollapsed(vnode)) {
+        return 'pf-collapsed';
+      } else {
+        return 'pf-expanded';
+      }
+    } else {
+      return undefined;
+    }
   }
 
   private renderRight(vnode: m.CVnode<TreeNodeAttrs>) {
@@ -102,33 +111,6 @@
     }
   }
 
-  private renderChildren(vnode: m.CVnode<TreeNodeAttrs>) {
-    const {children} = vnode;
-
-    return m(
-        '.pf-tree-children',
-        {
-          class: classNames(this.isCollapsed(vnode) && 'pf-pgrid-hidden'),
-        },
-        children,
-    );
-  }
-
-  private renderCollapseButton(vnode: m.Vnode<TreeNodeAttrs>) {
-    const {attrs, attrs: {onCollapseChanged = () => {}}} = vnode;
-
-    return m(Button, {
-      icon: this.isCollapsed(vnode) ? 'chevron_right' : 'expand_more',
-      minimal: true,
-      compact: true,
-      onclick: () => {
-        this.collapsed = !this.isCollapsed(vnode);
-        onCollapseChanged(this.collapsed, attrs);
-        globals.rafScheduler.scheduleFullRedraw();
-      },
-    });
-  }
-
   private isCollapsed({attrs}: m.Vnode<TreeNodeAttrs>): boolean {
     // If collapsed is omitted, use our local collapsed state instead.
     const {
@@ -139,7 +121,7 @@
   }
 }
 
-export function dictToTree(dict: {[key: string]: m.Child}): m.Children {
+export function dictToTreeNodes(dict: {[key: string]: m.Child}): m.Child[] {
   const children: m.Child[] = [];
   for (const key of Object.keys(dict)) {
     children.push(m(TreeNode, {
@@ -147,7 +129,12 @@
       right: dict[key],
     }));
   }
-  return m(Tree, children);
+  return children;
+}
+
+// Create a flat tree from a POJO
+export function dictToTree(dict: {[key: string]: m.Child}): m.Children {
+  return m(Tree, dictToTreeNodes(dict));
 }
 
 interface LazyTreeNodeAttrs {
@@ -156,6 +143,8 @@
   // Same as TreeNode (see above).
   right?: m.Children;
   // Same as TreeNode (see above).
+  icon?: string;
+  // Same as TreeNode (see above).
   summary?: m.Children;
   // A callback to be called when the TreeNode is expanded, in order to fetch
   // child nodes.
@@ -164,9 +153,9 @@
   // children is to avoid storing vnodes between render cycles, which is a bug
   // in Mithril.
   fetchData: () => Promise<() => m.Children>;
-  // Whether to keep child nodes in memory after the node has been collapsed.
-  // Defaults to true
-  hoardData?: boolean;
+  // Whether to unload children on collapse.
+  // Defaults to false: data is kept in memory until the node is destroyed.
+  unloadOnCollapse?: boolean;
 }
 
 // This component is a TreeNode which only loads child nodes when it's expanded.
@@ -174,19 +163,17 @@
 // up front, and even allows us to represent infinite or recursive trees.
 export class LazyTreeNode implements m.ClassComponent<LazyTreeNodeAttrs> {
   private collapsed: boolean = true;
-  private renderChildren = this.renderSpinner;
-
-  private renderSpinner(): m.Children {
-    return m(TreeNode, {left: m(Spinner)});
-  }
+  private loading: boolean = false;
+  private renderChildren?: () => m.Children;
 
   view({attrs}: m.CVnode<LazyTreeNodeAttrs>): m.Children {
     const {
       left,
       right,
+      icon,
       summary,
       fetchData,
-      hoardData = true,
+      unloadOnCollapse = false,
     } = attrs;
 
     return m(
@@ -194,25 +181,35 @@
         {
           left,
           right,
+          icon,
           summary,
+          showCaret: true,
+          loading: this.loading,
           collapsed: this.collapsed,
           onCollapseChanged: (collapsed) => {
             if (collapsed) {
-              if (!hoardData) {
-                this.renderChildren = this.renderSpinner;
+              if (unloadOnCollapse) {
+                this.renderChildren = undefined;
               }
             } else {
-              fetchData().then((result) => {
-                if (!this.collapsed) {
+              // Expanding
+              if (this.renderChildren) {
+                this.collapsed = false;
+                globals.rafScheduler.scheduleFullRedraw();
+              } else {
+                this.loading = true;
+                fetchData().then((result) => {
+                  this.loading = false;
+                  this.collapsed = false;
                   this.renderChildren = result;
                   globals.rafScheduler.scheduleFullRedraw();
-                }
-              });
+                });
+              }
             }
             this.collapsed = collapsed;
             globals.rafScheduler.scheduleFullRedraw();
           },
         },
-        this.renderChildren());
+        this.renderChildren && this.renderChildren());
   }
 }
diff --git a/ui/src/frontend/widgets/utils.ts b/ui/src/frontend/widgets/utils.ts
index c05cd47..6521162 100644
--- a/ui/src/frontend/widgets/utils.ts
+++ b/ui/src/frontend/widgets/utils.ts
@@ -38,7 +38,16 @@
   return el as HTMLElement;
 }
 
+// Returns true if value is not nullish, i.e. not null or undefined.
+// Allows writing:
+//   exists(val) && m('div', val)
+// even when val is a non-nullish falsy value like 0 or ''.
+export function exists<T>(value: T): value is Exclude<T, null|undefined> {
+  return value !== undefined && value !== null;
+}
+
 // Check if a mithril component vnode has children
 export function hasChildren({children}: m.Vnode<any>): boolean {
-  return Array.isArray(children) && children.length > 0;
+  return Array.isArray(children) && children.length > 0 &&
+      children.some(exists);
 }
diff --git a/ui/src/frontend/widgets_page.ts b/ui/src/frontend/widgets_page.ts
index 989be89..e3fc2e3 100644
--- a/ui/src/frontend/widgets_page.ts
+++ b/ui/src/frontend/widgets_page.ts
@@ -20,6 +20,7 @@
 import {LIBRARY_ADD_CHECK} from './icons';
 import {createPage} from './pages';
 import {PopupMenuButton} from './popup_menu';
+import {Icons} from './semantic_icons';
 import {TableShowcase} from './tables/table_showcase';
 import {Button} from './widgets/button';
 import {Checkbox} from './widgets/checkbox';
@@ -34,7 +35,7 @@
 import {Spinner} from './widgets/spinner';
 import {Switch} from './widgets/switch';
 import {TextInput} from './widgets/text_input';
-import {LazyTreeNode, Tree, TreeLayout, TreeNode} from './widgets/tree';
+import {LazyTreeNode, Tree, TreeNode} from './widgets/tree';
 
 const options: {[key: string]: boolean} = {
   foobar: false,
@@ -243,19 +244,6 @@
   }
 }
 
-function recursiveLazyTreeNode(
-    left: string, summary: string, hoardData: boolean): m.Children {
-  return m(LazyTreeNode, {
-    left,
-    summary,
-    hoardData,
-    fetchData: async () => {
-      await new Promise((r) => setTimeout(r, 200));
-      return () => recursiveLazyTreeNode(left, summary, hoardData);
-    },
-  });
-}
-
 export const WidgetsPage = createPage({
   view() {
     return m(
@@ -490,7 +478,10 @@
           renderWidget: (opts) => m(
               PopupMenu2,
               {
-                trigger: m(Button, {label: 'Menu', icon: 'arrow_drop_down'}),
+                trigger: m(Button, {
+                  label: 'Menu',
+                  rightIcon: Icons.ContextMenu,
+                }),
                 ...opts,
               },
               m(MenuItem, {label: 'New', icon: 'add'}),
@@ -543,63 +534,79 @@
         m('h2', 'Tree'),
         m(WidgetShowcase, {
           renderWidget: (opts) => m(
-              Tree,
-              opts,
-              m(TreeNode, {left: 'Name', right: 'my_event'}),
-              m(TreeNode, {left: 'CPU', right: '2'}),
-              m(TreeNode, {
-                left: 'SQL',
-                right: m(
-                    PopupMenu2,
-                    {
-                      trigger: m(Anchor, {
-                        text: 'SELECT * FROM ftrace_event WHERE id = 123',
-                        icon: 'unfold_more',
-                      }),
-                    },
-                    m(MenuItem, {
-                      label: 'Copy SQL Query',
-                      icon: 'content_copy',
-                    }),
-                    m(MenuItem, {
-                      label: 'Execute Query in new tab',
-                      icon: 'open_in_new',
-                    }),
-                    ),
-              }),
-              m(TreeNode, {
-                left: 'Thread',
-                right: m(Anchor, {text: 'my_thread[456]', icon: 'open_in_new'}),
-              }),
-              m(TreeNode, {
-                left: 'Process',
-                right: m(Anchor, {text: '/bin/foo[789]', icon: 'open_in_new'}),
-              }),
-              recursiveLazyTreeNode('Lazy', '(hoarding)', true),
-              recursiveLazyTreeNode('Lazy', '(non-hoarding)', false),
-              m(
-                  TreeNode,
-                  {
-                    left: 'Args',
-                    summary: 'foo: string, baz: string, quux: string[4]',
-                  },
-                  m(TreeNode, {left: 'foo', right: 'bar'}),
-                  m(TreeNode, {left: 'baz', right: 'qux'}),
-                  m(
-                      TreeNode,
-                      {left: 'quux'},
-                      m(TreeNode, {left: '[0]', right: 'corge'}),
-                      m(TreeNode, {left: '[1]', right: 'grault'}),
-                      m(TreeNode, {left: '[2]', right: 'garply'}),
-                      m(TreeNode, {left: '[3]', right: 'waldo'}),
-                      ),
-                  ),
-              ),
-          initialOpts: {
-            layout: new EnumOption(
-                TreeLayout.Grid,
-                Object.values(TreeLayout),
+            Tree,
+            opts,
+            m(TreeNode, {left: 'Name', right: 'my_event', icon: 'badge'}),
+            m(TreeNode, {left: 'CPU', right: '2', icon: 'memory'}),
+            m(TreeNode,
+              {left: 'Start time', right: '1s 435ms', icon: 'schedule'}),
+            m(TreeNode, {left: 'Duration', right: '86ms', icon: 'timer'}),
+            m(TreeNode, {
+              left: 'SQL',
+              right: m(
+                PopupMenu2,
+                {
+                  popupPosition: PopupPosition.RightStart,
+                  trigger: m(Anchor, {
+                    icon: Icons.ContextMenu,
+                  }, 'SELECT * FROM raw WHERE id = 123'),
+                },
+                m(MenuItem, {
+                  label: 'Copy SQL Query',
+                  icon: 'content_copy',
+                }),
+                m(MenuItem, {
+                  label: 'Execute Query in new tab',
+                  icon: 'open_in_new',
+                }),
                 ),
+            }),
+            m(TreeNode, {
+              icon: 'account_tree',
+              left: 'Process',
+              right: m(Anchor, {icon: 'open_in_new'}, '/bin/foo[789]'),
+            }),
+            m(TreeNode, {
+              left: 'Thread',
+              right: m(Anchor, {icon: 'open_in_new'}, 'my_thread[456]'),
+            }),
+            m(
+              TreeNode,
+              {
+                left: 'Args',
+                summary: 'foo: string, baz: string, quux: string[4]',
+              },
+              m(TreeNode, {left: 'foo', right: 'bar'}),
+              m(TreeNode, {left: 'baz', right: 'qux'}),
+              m(
+                TreeNode,
+                {left: 'quux', summary: 'string[4]'},
+                m(TreeNode, {left: '[0]', right: 'corge'}),
+                m(TreeNode, {left: '[1]', right: 'grault'}),
+                m(TreeNode, {left: '[2]', right: 'garply'}),
+                m(TreeNode, {left: '[3]', right: 'waldo'}),
+                ),
+              ),
+            m(LazyTreeNode, {
+              left: 'Lazy',
+              icon: 'bedtime',
+              fetchData: async () => {
+                await new Promise((r) => setTimeout(r, 1000));
+                return () => m(TreeNode, {left: 'foo'});
+              },
+            }),
+            m(LazyTreeNode, {
+              left: 'Dynamic',
+              unloadOnCollapse: true,
+              icon: 'bedtime',
+              fetchData: async () => {
+                await new Promise((r) => setTimeout(r, 1000));
+                return () => m(TreeNode, {left: 'foo'});
+              },
+            }),
+            ),
+          initialOpts: {
+            hideControls: false,
           },
           wide: true,
         }),
diff --git a/ui/src/tracks/debug/add_debug_track_menu.ts b/ui/src/tracks/debug/add_debug_track_menu.ts
index 32bc871..896c877 100644
--- a/ui/src/tracks/debug/add_debug_track_menu.ts
+++ b/ui/src/tracks/debug/add_debug_track_menu.ts
@@ -36,10 +36,18 @@
 
 export class AddDebugTrackMenu implements
     m.ClassComponent<AddDebugTrackMenuAttrs> {
+  readonly columns: string[];
+
   name: string = '';
   sliceColumns: SliceColumns;
+  arrangeBy?: {
+    type: 'thread'|'process',
+    column: string,
+  };
 
   constructor(vnode: m.Vnode<AddDebugTrackMenuAttrs>) {
+    this.columns = [...vnode.attrs.columns];
+
     const chooseDefaultOption = (name: string) => {
       for (const column of vnode.attrs.columns) {
         if (column === name) return column;
diff --git a/ui/src/tracks/debug/details_tab.ts b/ui/src/tracks/debug/details_tab.ts
index 50c2319..9b7868a 100644
--- a/ui/src/tracks/debug/details_tab.ts
+++ b/ui/src/tracks/debug/details_tab.ts
@@ -14,19 +14,45 @@
 
 import m from 'mithril';
 
-import {ColumnType} from '../../common/query_result';
-import {tpDurationFromSql, tpTimeFromSql} from '../../common/time';
+import {GridLayout} from '../../frontend/widgets/grid_layout';
+import {Section} from '../../frontend/widgets/section';
+import {ColumnType, LONG, STR} from '../../common/query_result';
+import {TPDuration, tpDurationFromSql, tpTimeFromSql} from '../../common/time';
 import {
   BottomTab,
   bottomTabRegistry,
   NewBottomTabArgs,
 } from '../../frontend/bottom_tab';
 import {globals} from '../../frontend/globals';
-import {asTPTimestamp} from '../../frontend/sql_types';
+import {
+  getSlice,
+  SliceDetails,
+  sliceRef,
+} from '../../frontend/sql/slice';
+import {
+  asSliceSqlId,
+  asTPTimestamp,
+  TPTimestamp,
+  Utid,
+} from '../../frontend/sql_types';
+import {
+  getProcessName,
+  getThreadName,
+} from '../../frontend/thread_and_process_info';
+import {
+  getThreadState,
+  ThreadState,
+  threadStateRef,
+} from '../../frontend/thread_state';
+import {DetailsShell} from '../../frontend/widgets/details_shell';
 import {Duration} from '../../frontend/widgets/duration';
 import {Timestamp} from '../../frontend/widgets/timestamp';
-import {dictToTree} from '../../frontend/widgets/tree';
-
+import {
+  dictToTree,
+  dictToTreeNodes,
+  Tree,
+  TreeNode,
+} from '../../frontend/widgets/tree';
 import {ARG_PREFIX} from './add_debug_track_menu';
 
 interface DebugSliceDetailsTabConfig {
@@ -34,7 +60,7 @@
   id: number;
 }
 
-function SqlValueToString(val: ColumnType) {
+function sqlValueToString(val: ColumnType): string {
   if (val instanceof Uint8Array) {
     return `<blob length=${val.length}>`;
   }
@@ -44,56 +70,191 @@
   return val.toString();
 }
 
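+// Best-effort conversion of a SQL value to a number; returns undefined for
+// non-numeric values.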
+function sqlValueToNumber(value?: ColumnType): number|undefined {
+  if (typeof value === 'bigint') return Number(value);
+  if (typeof value !== 'number') return undefined;
+  return value;
+}
+
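+// Best-effort conversion of a SQL value to a Utid; returns undefined for
+// non-numeric values.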
+function sqlValueToUtid(value?: ColumnType): Utid|undefined {
+  if (typeof value === 'bigint') return Number(value) as Utid;
+  if (typeof value !== 'number') return undefined;
+  return value as Utid;
+}
+
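+// Converts a dict into a list of TreeNodes, skipping null and undefined
+// values.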
+function renderTreeContents(dict: {[key: string]: m.Child}): m.Child[] {
+  const children: m.Child[] = [];
+  for (const key of Object.keys(dict)) {
+    if (dict[key] === null || dict[key] === undefined) continue;
+    children.push(m(TreeNode, {
+      left: key,
+      right: dict[key],
+    }));
+  }
+  return children;
+}
+
 export class DebugSliceDetailsTab extends
     BottomTab<DebugSliceDetailsTabConfig> {
   static readonly kind = 'org.perfetto.DebugSliceDetailsTab';
 
-  data: {[key: string]: ColumnType}|undefined;
+  data?: {
+    name: string,
+    ts: TPTimestamp,
+    dur: TPDuration,
+    args: {[key: string]: ColumnType};
+  };
+  // We will try to interpret the arguments as references into well-known
+  // tables. These values will be set if the relevant columns exist and
+  // are consistent (e.g. 'ts' and 'dur' for this slice correspond to values
+  // in these well-known tables).
+  threadState?: ThreadState;
+  slice?: SliceDetails;
 
   static create(args: NewBottomTabArgs): DebugSliceDetailsTab {
     return new DebugSliceDetailsTab(args);
   }
 
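+  // Loads the thread state with the given id, keeping it only if its ts, dur
+  // and utid match this debug slice.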
+  private async maybeLoadThreadState(
+      id: number|undefined, ts: TPTimestamp, dur: TPDuration,
+      utid?: Utid): Promise<ThreadState|undefined> {
+    if (id === undefined) return undefined;
+    if (utid === undefined) return undefined;
+
+    const threadState = await getThreadState(this.engine, id);
+    if (threadState === undefined) return undefined;
+    if (threadState.ts === ts && threadState.dur === dur &&
+        threadState.thread?.utid === utid) {
+      return threadState;
+    } else {
+      return undefined;
+    }
+  }
+
+  private renderThreadStateInfo(): m.Child {
+    if (this.threadState === undefined) return null;
+    return m(
+        TreeNode,
+        {
+          left: threadStateRef(this.threadState),
+          right: '',
+        },
+        renderTreeContents({
+          'Thread': getThreadName(this.threadState.thread),
+          'Process': getProcessName(this.threadState.thread?.process),
+          'State': this.threadState.state,
+        }));
+  }
+
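+  // Loads the slice with the given id, keeping it only if its ts, dur and
+  // track id match this debug slice.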
+  private async maybeLoadSlice(
+      id: number|undefined, ts: TPTimestamp, dur: TPDuration,
+      sqlTrackId?: number): Promise<SliceDetails|undefined> {
+    if (id === undefined) return undefined;
+    if (sqlTrackId === undefined) return undefined;
+
+    const slice = await getSlice(this.engine, asSliceSqlId(id));
+    if (slice === undefined) return undefined;
+    if (slice.ts === ts && slice.dur === dur &&
+        slice.sqlTrackId === sqlTrackId) {
+      return slice;
+    } else {
+      return undefined;
+    }
+  }
+
+  private renderSliceInfo(): m.Child {
+    if (this.slice === undefined) return null;
+    return m(
+        TreeNode,
+        {
+          left: sliceRef(this.slice, 'Slice'),
+          right: '',
+        },
+        renderTreeContents({
+          'Name': this.slice.name,
+          'Thread': getThreadName(this.slice.thread),
+          'Process': getProcessName(this.slice.process),
+        }));
+  }
+
+
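+  // Loads the row from the debug slice table, collects the ARG_PREFIX-prefixed
+  // columns as args and tries to resolve the referenced thread state and slice.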
+  private async loadData() {
+    const queryResult = await this.engine.query(`select * from ${
+        this.config.sqlTableName} where id = ${this.config.id}`);
+    const row = queryResult.firstRow({
+      ts: LONG,
+      dur: LONG,
+      name: STR,
+    });
+    this.data = {
+      name: row.name,
+      ts: row.ts as TPTimestamp,
+      dur: row.dur,
+      args: {},
+    };
+
+    for (const key of Object.keys(row)) {
+      if (key.startsWith(ARG_PREFIX)) {
+        this.data.args[key.substr(ARG_PREFIX.length)] =
+            (row as {[key: string]: ColumnType})[key];
+      }
+    }
+
+    this.threadState = await this.maybeLoadThreadState(
+        sqlValueToNumber(this.data.args['id']),
+        this.data.ts,
+        this.data.dur,
+        sqlValueToUtid(this.data.args['utid']));
+
+    this.slice = await this.maybeLoadSlice(
+        sqlValueToNumber(this.data.args['id']) ??
+            sqlValueToNumber(this.data.args['slice_id']),
+        this.data.ts,
+        this.data.dur,
+        sqlValueToNumber(this.data.args['track_id']));
+
+    globals.rafScheduler.scheduleRedraw();
+  }
+
   constructor(args: NewBottomTabArgs) {
     super(args);
-
-    this.engine
-        .query(`select * from ${this.config.sqlTableName} where id = ${
-            this.config.id}`)
-        .then((queryResult) => {
-          this.data = queryResult.firstRow({});
-          globals.rafScheduler.scheduleFullRedraw();
-        });
+    this.loadData();
   }
 
   viewTab() {
     if (this.data === undefined) {
       return m('h2', 'Loading');
     }
-    const left = dictToTree({
+    const details = dictToTreeNodes({
       'Name': this.data['name'] as string,
       'Start time':
           m(Timestamp, {ts: asTPTimestamp(tpTimeFromSql(this.data['ts']))}),
       'Duration': m(Duration, {dur: tpDurationFromSql(this.data['dur'])}),
       'Debug slice id': `${this.config.sqlTableName}[${this.config.id}]`,
     });
+    details.push(this.renderThreadStateInfo());
+    details.push(this.renderSliceInfo());
+
     const args: {[key: string]: m.Child} = {};
-    for (const key of Object.keys(this.data)) {
-      if (key.startsWith(ARG_PREFIX)) {
-        args[key.substr(ARG_PREFIX.length)] = SqlValueToString(this.data[key]);
-      }
+    for (const key of Object.keys(this.data.args)) {
+      args[key] = sqlValueToString(this.data.args[key]);
     }
+
     return m(
-        '.details-panel',
-        m('header.overview', m('span', 'Debug Slice')),
-        m('.details-table-multicolumn',
-          {
-            style: {
-              'user-select': 'text',
-            },
-          },
-          m('.half-width-panel', left),
-          m('.half-width-panel', dictToTree(args))));
+        DetailsShell,
+        {
+          title: 'Debug Slice',
+        },
+        m(
+            GridLayout,
+            m(
+                Section,
+                {title: 'Details'},
+                m(Tree, details),
+                ),
+            m(Section, {title: 'Arguments'}, dictToTree(args)),
+            ),
+    );
   }
 
   getTitle(): string {
diff --git a/ui/src/tracks/scroll_jank/event_latency_track.ts b/ui/src/tracks/scroll_jank/event_latency_track.ts
index d805e74..9c60a01 100644
--- a/ui/src/tracks/scroll_jank/event_latency_track.ts
+++ b/ui/src/tracks/scroll_jank/event_latency_track.ts
@@ -44,13 +44,8 @@
     if (this.createdModels) {
       return;
     }
-    const sql = `CREATE VIEW ${tableName} AS ` + generateSqlWithInternalLayout({
-                  columns: ['id', 'ts', 'dur', 'track_id', 'name'],
-                  layoutParams: {ts: 'ts', dur: 'dur'},
-                  sourceTable: 'slice',
-                  whereClause: 'slice.id IN ' +
-                      '(SELECT slice_id FROM event_latency_scroll_jank_cause)',
-                });
+    const sql = `CREATE VIEW ${tableName} AS ` +
+        `SELECT * FROM _perfetto_ui_impl_chrome_event_latency_scroll_janks`;
     await this.engine.query(sql);
     this.createdModels = true;
   }
@@ -69,6 +64,18 @@
       SELECT RUN_METRIC('chrome/event_latency_scroll_jank_cause.sql');
     `);
 
+  const sql =
+      `CREATE TABLE _perfetto_ui_impl_chrome_event_latency_scroll_janks AS ` +
+      generateSqlWithInternalLayout({
+        columns: ['id', 'ts', 'dur', 'track_id', 'name'],
+        layoutParams: {ts: 'ts', dur: 'dur'},
+        sourceTable: 'slice',
+        whereClause: 'slice.id IN ' +
+            '(SELECT slice_id FROM event_latency_scroll_jank_cause)',
+      });
+
+  await engine.query(sql);
+
   result.tracksToAdd.push({
     id: uuidv4(),
     engineId: engine.id,