ui: Use the TP-generated stdlib documentation
diff --git a/python/generators/sql_processing/stdlib_parser.py b/python/generators/sql_processing/stdlib_parser.py
index bbbf11e..9f820c5 100644
--- a/python/generators/sql_processing/stdlib_parser.py
+++ b/python/generators/sql_processing/stdlib_parser.py
@@ -20,7 +20,6 @@
 """
 
 import os
-from collections import defaultdict
 from pathlib import Path
 from typing import List, Tuple, Optional
 
@@ -196,190 +195,61 @@
   }
 
 
-def format_docs(modules: List[Tuple[str, str, str, ParsedModule]]) -> list:
-  """Format parsed modules as documentation JSON (for gen_stdlib_docs_json).
+def format_metadata(modules: List[Tuple[str, str, str, ParsedModule]]) -> dict:
+  """Format only the metadata not available from the TP table functions.
 
-  Output format matches what gen_stdlib_docs_json currently produces.
+  The TP exposes module names, packages, table/function/macro names,
+  descriptions, types, columns and args. This function emits only the
+  complementary metadata that lives outside the SQL syntax:
+    - tags and data-availability check SQL (module level)
+    - includes (INCLUDE PERFETTO MODULE directives)
+    - importance and data-availability check SQL (table level)
+
+  Output (keyed by module name so the UI can look up by key):
+  {
+    "android.memory": {
+      "tags": ["android"],
+      "includes": ["android.memory.heap"],
+      "data_check_sql": "SELECT EXISTS(...) AS has_data",  // null if absent
+      "tables": {                                          // omitted if empty
+        "android_heap_profile_allocation": {
+          "importance": "high",                           // null if absent
+          "data_check_sql": "SELECT EXISTS(...) AS has_data"  // null if absent
+        }
+      }
+    },
+    ...
+  }
   """
-
-  # Use the curated data check SQL map
-  data_check_sql_map = MODULE_DATA_CHECK_SQL
-
-  def _summary_desc(s: str) -> str:
-    """Extract the first sentence from a description."""
-    s = s.replace('\n', ' ')
-    if '. ' in s:
-      return s.split('. ')[0]
-    elif '.' in s:
-      return s.split('.')[0]
-    return s
-
-  def _create_field_dict(name: str, obj, include_desc: bool = True) -> dict:
-    """Create a dictionary for a column or argument.
-
-    Parses long_type to extract table and column references.
-    Expected format: "TYPE(table_name.column_name)" where TYPE is optional uppercase,
-    and table_name and column_name are lowercase with underscores.
-    If the format doesn't match, table and column are set to None.
-    """
-    import re
-
-    # Parse long type string to extract table and column references
-    # Expected format: "TYPE(table_name.column_name)"
-    table, column = None, None
-    if hasattr(obj, 'long_type') and obj.long_type:
-      pattern = r'[A-Z]*\(([a-z_]*)\.([a-z_]*)\)'
-      m = re.match(pattern, obj.long_type)
-      if m:
-        table, column = m.groups()
-
-    result = {
-        'name': name,
-        'type': obj.long_type if hasattr(obj, 'long_type') else None,
-        'table': table,
-        'column': column,
-    }
-    if include_desc:
-      result['desc'] = obj.description if hasattr(obj, 'description') else None
-    return result
-
-  packages = defaultdict(list)
+  result = {}
 
   for _, _, module_name, parsed in modules:
-    package_name = module_name.split(".")[0]
+    tags = get_tags(module_name)
+    includes = [inc.module for inc in parsed.includes]
+    data_check_sql = (
+        check_to_query(MODULE_DATA_CHECK_SQL[module_name])
+        if module_name in MODULE_DATA_CHECK_SQL else None)
 
-    module_dict = {
-        'module_name':
-            module_name,
-        'module_doc': {
-            'name': parsed.module_doc.name,
-            'desc': parsed.module_doc.desc,
-        } if parsed.module_doc else None,
-        'tags':
-            get_tags(module_name),
-        'includes': [inc.module for inc in parsed.includes],
-        'data_objects': [{
-            'name':
-                table.name,
-            'desc':
-                table.desc,
-            'summary_desc':
-                _summary_desc(table.desc),
-            'type':
-                table.type,
-            'visibility':
-                'private' if is_internal(table.name) else 'public',
-            'importance':
-                get_table_importance(table.name),
-            'data_check_sql':
-                check_to_query(TABLE_DATA_CHECK_SQL[table.name])
-                if table.name in TABLE_DATA_CHECK_SQL else None,
-            'cols': [
-                _create_field_dict(col_name, col)
-                for (col_name, col) in table.cols.items()
-            ]
+    tables = {}
+    for table in parsed.table_views:
+      importance = get_table_importance(table.name)
+      table_check = (
+          check_to_query(TABLE_DATA_CHECK_SQL[table.name])
+          if table.name in TABLE_DATA_CHECK_SQL else None)
+      if importance is not None or table_check is not None:
+        tables[table.name] = {
+            'importance': importance,
+            'data_check_sql': table_check,
         }
-                         for table in parsed.table_views],
-        'functions': [{
-            'name': function.name,
-            'desc': function.desc,
-            'summary_desc': _summary_desc(function.desc),
-            'visibility': 'private' if is_internal(function.name) else 'public',
-            'args': [
-                _create_field_dict(arg_name, arg)
-                for (arg_name, arg) in function.args.items()
-            ],
-            'return_type': function.return_type,
-            'return_desc': function.return_desc,
-        }
-                      for function in parsed.functions],
-        'table_functions': [{
-            'name':
-                function.name,
-            'desc':
-                function.desc,
-            'summary_desc':
-                _summary_desc(function.desc),
-            'visibility':
-                'private' if is_internal(function.name) else 'public',
-            'args': [
-                _create_field_dict(arg_name, arg)
-                for (arg_name, arg) in function.args.items()
-            ],
-            'cols': [
-                _create_field_dict(col_name, col)
-                for (col_name, col) in function.cols.items()
-            ]
-        }
-                            for function in parsed.table_functions],
-        'macros': [{
-            'name':
-                macro.name,
-            'desc':
-                macro.desc,
-            'summary_desc':
-                _summary_desc(macro.desc),
-            'visibility':
-                'private' if is_internal(macro.name) else 'public',
-            'return_desc':
-                macro.return_desc,
-            'return_type':
-                macro.return_type,
-            'args': [
-                _create_field_dict(arg_name, arg)
-                for (arg_name, arg) in macro.args.items()
-            ],
-        }
-                   for macro in parsed.macros],
-        'data_check_sql':
-            check_to_query(data_check_sql_map.get(module_name))
-            if module_name in data_check_sql_map else None,
-    }
-    packages[package_name].append(module_dict)
 
-  packages_list = [{
-      "name": name,
-      "modules": modules
-  } for name, modules in packages.items()]
-
-  return packages_list
-
-
-def format_full(modules: List[Tuple[str, str, str, ParsedModule]]) -> dict:
-  """Format parsed modules with full information (for check_sql_modules.py).
-
-  Includes raw SQL and parsed module data for validation.
-  """
-  modules_list = []
-
-  for abs_path, rel_path, module_name, parsed in modules:
-    # Read raw SQL
-    with open(abs_path, 'r', encoding='utf-8') as f:
-      sql = f.read()
-
-    # Extract includes in the format needed
-    includes = [{
-        'package':
-            inc.package,
-        'module':
-            inc.module,
-        'full_name':
-            f"{inc.package}.{inc.module}" if inc.package else inc.module
-    } for inc in parsed.includes]
-
-    module_dict = {
-        'path': abs_path,
-        'rel_path': rel_path,
-        'module_name': module_name,
-        'package_name': parsed.package_name,
-        'sql': sql,
+    entry = {
+        'tags': tags,
         'includes': includes,
-        'errors': parsed.errors,
-        'functions_count': len(parsed.functions),
-        'table_functions_count': len(parsed.table_functions),
-        'table_views_count': len(parsed.table_views),
-        'macros_count': len(parsed.macros),
+        'data_check_sql': data_check_sql,
     }
-    modules_list.append(module_dict)
+    if tables:
+      entry['tables'] = tables
 
-  return {'modules': modules_list}
+    result[module_name] = entry
+
+  return result
diff --git a/src/trace_processor/perfetto_sql/intrinsics/table_functions/stdlib_docs_table_function.cc b/src/trace_processor/perfetto_sql/intrinsics/table_functions/stdlib_docs_table_function.cc
index a72f36a..a380bfa 100644
--- a/src/trace_processor/perfetto_sql/intrinsics/table_functions/stdlib_docs_table_function.cc
+++ b/src/trace_processor/perfetto_sql/intrinsics/table_functions/stdlib_docs_table_function.cc
@@ -77,6 +77,32 @@
   return parsed;
 }
 
+// Calls callback(module_key, parsed) for each module matched by |arg|.
+// If |arg| is "*", all loaded modules are visited; otherwise exactly |arg|.
+// Returns the first parse failure encountered, if any.
+template <typename Fn>
+base::Status ForEachModule(const PerfettoSqlEngine* engine,
+                           const std::string& arg,
+                           Fn callback) {
+  if (arg == "*") {
+    for (const auto& kv : engine->GetModules()) {
+      const std::string& mod = kv.second;
+      auto parsed_or = ParseModule(engine, mod);
+      if (!parsed_or.ok()) {
+        return parsed_or.status();
+      }
+      callback(mod, *parsed_or);
+    }
+  } else {
+    auto parsed_or = ParseModule(engine, arg);
+    if (!parsed_or.ok()) {
+      return parsed_or.status();
+    }
+    callback(arg, *parsed_or);
+  }
+  return base::OkStatus();
+}
+
 }  // namespace
 
 // ============================================================================
@@ -130,28 +156,32 @@
 bool StdlibDocsTables::Cursor::Run(const std::vector<SqlValue>& arguments) {
   PERFETTO_DCHECK(arguments.size() == 1);
   table_.Clear();
-  if (arguments[0].is_null()) {
-    return OnSuccess(&table_.dataframe());
-  }
   if (arguments[0].type != SqlValue::kString) {
     return OnFailure(
         base::ErrStatus("__intrinsic_stdlib_tables: module must be a string"));
   }
-  std::string module_key = arguments[0].AsString();
-  auto parsed_or = ParseModule(engine_, module_key);
-  if (!parsed_or.ok()) {
-    return OnFailure(parsed_or.status());
-  }
-  for (const auto& tv : parsed_or->table_views) {
-    tables::StdlibDocsTablesTable::Row row;
-    row.name = string_pool_->InternString(base::StringView(tv.name));
-    row.type = string_pool_->InternString(base::StringView(tv.type));
-    row.description =
-        string_pool_->InternString(base::StringView(tv.description));
-    row.exposed = tv.exposed ? 1 : 0;
-    row.cols = string_pool_->InternString(
-        base::StringView(SerializeEntries(tv.columns)));
-    table_.Insert(row);
+  std::string arg = arguments[0].AsString();
+  auto st = ForEachModule(
+      engine_, arg,
+      [&](const std::string& module_key,
+          const stdlib_doc::ParsedModule& parsed) {
+        StringPool::Id mod_id =
+            string_pool_->InternString(base::StringView(module_key));
+        for (const auto& tv : parsed.table_views) {
+          tables::StdlibDocsTablesTable::Row row;
+          row.module = mod_id;
+          row.name = string_pool_->InternString(base::StringView(tv.name));
+          row.type = string_pool_->InternString(base::StringView(tv.type));
+          row.description =
+              string_pool_->InternString(base::StringView(tv.description));
+          row.exposed = tv.exposed ? 1 : 0;
+          row.cols = string_pool_->InternString(
+              base::StringView(SerializeEntries(tv.columns)));
+          table_.Insert(row);
+        }
+      });
+  if (!st.ok()) {
+    return OnFailure(st);
   }
   return OnSuccess(&table_.dataframe());
 }
@@ -187,34 +217,38 @@
 bool StdlibDocsFunctions::Cursor::Run(const std::vector<SqlValue>& arguments) {
   PERFETTO_DCHECK(arguments.size() == 1);
   table_.Clear();
-  if (arguments[0].is_null()) {
-    return OnSuccess(&table_.dataframe());
-  }
   if (arguments[0].type != SqlValue::kString) {
     return OnFailure(base::ErrStatus(
         "__intrinsic_stdlib_functions: module must be a string"));
   }
-  std::string module_key = arguments[0].AsString();
-  auto parsed_or = ParseModule(engine_, module_key);
-  if (!parsed_or.ok()) {
-    return OnFailure(parsed_or.status());
-  }
-  for (const auto& fn : parsed_or->functions) {
-    tables::StdlibDocsFunctionsTable::Row row;
-    row.name = string_pool_->InternString(base::StringView(fn.name));
-    row.description =
-        string_pool_->InternString(base::StringView(fn.description));
-    row.exposed = fn.exposed ? 1 : 0;
-    row.is_table_function = fn.is_table_function ? 1 : 0;
-    row.return_type =
-        string_pool_->InternString(base::StringView(fn.return_type));
-    row.return_description =
-        string_pool_->InternString(base::StringView(fn.return_description));
-    row.args =
-        string_pool_->InternString(base::StringView(SerializeEntries(fn.args)));
-    row.cols = string_pool_->InternString(
-        base::StringView(SerializeEntries(fn.columns)));
-    table_.Insert(row);
+  std::string arg = arguments[0].AsString();
+  auto st = ForEachModule(
+      engine_, arg,
+      [&](const std::string& module_key,
+          const stdlib_doc::ParsedModule& parsed) {
+        StringPool::Id mod_id =
+            string_pool_->InternString(base::StringView(module_key));
+        for (const auto& fn : parsed.functions) {
+          tables::StdlibDocsFunctionsTable::Row row;
+          row.module = mod_id;
+          row.name = string_pool_->InternString(base::StringView(fn.name));
+          row.description =
+              string_pool_->InternString(base::StringView(fn.description));
+          row.exposed = fn.exposed ? 1 : 0;
+          row.is_table_function = fn.is_table_function ? 1 : 0;
+          row.return_type =
+              string_pool_->InternString(base::StringView(fn.return_type));
+          row.return_description = string_pool_->InternString(
+              base::StringView(fn.return_description));
+          row.args = string_pool_->InternString(
+              base::StringView(SerializeEntries(fn.args)));
+          row.cols = string_pool_->InternString(
+              base::StringView(SerializeEntries(fn.columns)));
+          table_.Insert(row);
+        }
+      });
+  if (!st.ok()) {
+    return OnFailure(st);
   }
   return OnSuccess(&table_.dataframe());
 }
@@ -250,31 +284,35 @@
 bool StdlibDocsMacros::Cursor::Run(const std::vector<SqlValue>& arguments) {
   PERFETTO_DCHECK(arguments.size() == 1);
   table_.Clear();
-  if (arguments[0].is_null()) {
-    return OnSuccess(&table_.dataframe());
-  }
   if (arguments[0].type != SqlValue::kString) {
     return OnFailure(
         base::ErrStatus("__intrinsic_stdlib_macros: module must be a string"));
   }
-  std::string module_key = arguments[0].AsString();
-  auto parsed_or = ParseModule(engine_, module_key);
-  if (!parsed_or.ok()) {
-    return OnFailure(parsed_or.status());
-  }
-  for (const auto& macro : parsed_or->macros) {
-    tables::StdlibDocsMacrosTable::Row row;
-    row.name = string_pool_->InternString(base::StringView(macro.name));
-    row.description =
-        string_pool_->InternString(base::StringView(macro.description));
-    row.exposed = macro.exposed ? 1 : 0;
-    row.return_type =
-        string_pool_->InternString(base::StringView(macro.return_type));
-    row.return_description =
-        string_pool_->InternString(base::StringView(macro.return_description));
-    row.args = string_pool_->InternString(
-        base::StringView(SerializeEntries(macro.args)));
-    table_.Insert(row);
+  std::string arg = arguments[0].AsString();
+  auto st = ForEachModule(
+      engine_, arg,
+      [&](const std::string& module_key,
+          const stdlib_doc::ParsedModule& parsed) {
+        StringPool::Id mod_id =
+            string_pool_->InternString(base::StringView(module_key));
+        for (const auto& macro : parsed.macros) {
+          tables::StdlibDocsMacrosTable::Row row;
+          row.module = mod_id;
+          row.name = string_pool_->InternString(base::StringView(macro.name));
+          row.description =
+              string_pool_->InternString(base::StringView(macro.description));
+          row.exposed = macro.exposed ? 1 : 0;
+          row.return_type =
+              string_pool_->InternString(base::StringView(macro.return_type));
+          row.return_description = string_pool_->InternString(
+              base::StringView(macro.return_description));
+          row.args = string_pool_->InternString(
+              base::StringView(SerializeEntries(macro.args)));
+          table_.Insert(row);
+        }
+      });
+  if (!st.ok()) {
+    return OnFailure(st);
   }
   return OnSuccess(&table_.dataframe());
 }
diff --git a/src/trace_processor/perfetto_sql/intrinsics/table_functions/tables.py b/src/trace_processor/perfetto_sql/intrinsics/table_functions/tables.py
index a9baec2..599ec70 100644
--- a/src/trace_processor/perfetto_sql/intrinsics/table_functions/tables.py
+++ b/src/trace_processor/perfetto_sql/intrinsics/table_functions/tables.py
@@ -301,6 +301,7 @@
     class_name="StdlibDocsTablesTable",
     sql_name="not_exposed_to_sql",
     columns=[
+        C("module", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("name", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("type", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("description",
@@ -316,6 +317,7 @@
     class_name="StdlibDocsFunctionsTable",
     sql_name="not_exposed_to_sql",
     columns=[
+        C("module", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("name", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("description",
           CppString(),
@@ -340,6 +342,7 @@
     class_name="StdlibDocsMacrosTable",
     sql_name="not_exposed_to_sql",
     columns=[
+        C("module", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("name", CppString(), cpp_access=CppAccess.READ_AND_HIGH_PERF_WRITE),
         C("description",
           CppString(),
diff --git a/tools/gen_stdlib_docs_json.py b/tools/gen_stdlib_docs_json.py
index 2bde14e..0aa867b 100755
--- a/tools/gen_stdlib_docs_json.py
+++ b/tools/gen_stdlib_docs_json.py
@@ -26,7 +26,7 @@
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.append(ROOT_DIR)
 
-from python.generators.sql_processing.stdlib_parser import parse_all_modules, format_docs
+from python.generators.sql_processing.stdlib_parser import parse_all_modules, format_metadata
 
 
 def main():
@@ -84,8 +84,8 @@
         include_internal=args.with_internal,
         name_filter=None)
 
-    # Format as docs JSON
-    output_data = format_docs(modules)
+    # Format as metadata-only JSON (doc content comes from TP table functions)
+    output_data = format_metadata(modules)
 
     # Write output
     with open(args.json_out, 'w', encoding='utf-8') as f:
diff --git a/ui/src/plugins/dev.perfetto.DataExplorer/datagrid_node_creation.ts b/ui/src/plugins/dev.perfetto.DataExplorer/datagrid_node_creation.ts
index 1b0605c..9f6401c 100644
--- a/ui/src/plugins/dev.perfetto.DataExplorer/datagrid_node_creation.ts
+++ b/ui/src/plugins/dev.perfetto.DataExplorer/datagrid_node_creation.ts
@@ -301,12 +301,11 @@
 
   // Create the table node with the specific table
   const tableNode = descriptor.factory(
+    {sqlTable: sqlTable.name},
     {
-      sqlTable,
-      sqlModules: deps.sqlModules,
-      trace: deps.trace,
+      allNodes: state.rootNodes,
+      context: {sqlModules: deps.sqlModules, trace: deps.trace},
     },
-    {allNodes: state.rootNodes},
   );
 
   // Connect table node to AddColumnsNode's secondary input (port 0)
diff --git a/ui/src/plugins/dev.perfetto.DataExplorer/graph_io.ts b/ui/src/plugins/dev.perfetto.DataExplorer/graph_io.ts
index a1b6192..295c362 100644
--- a/ui/src/plugins/dev.perfetto.DataExplorer/graph_io.ts
+++ b/ui/src/plugins/dev.perfetto.DataExplorer/graph_io.ts
@@ -195,8 +195,11 @@
         }
 
         const tableNode = tableDescriptor.factory(
-          {sqlTable, sqlModules, trace},
-          {allNodes: [...coreNodes, ...rightNodes]},
+          {sqlTable: sqlTable.name},
+          {
+            allNodes: [...coreNodes, ...rightNodes],
+            context: {sqlModules, trace},
+          },
         );
         target.push(tableNode);
       } catch (error) {
diff --git a/ui/src/plugins/dev.perfetto.SqlModules/index.ts b/ui/src/plugins/dev.perfetto.SqlModules/index.ts
index 658a79b..df670e6 100644
--- a/ui/src/plugins/dev.perfetto.SqlModules/index.ts
+++ b/ui/src/plugins/dev.perfetto.SqlModules/index.ts
@@ -14,19 +14,29 @@
 
 import m from 'mithril';
 import {assetSrc} from '../../base/assets';
-import {defer} from '../../base/deferred';
-import {extensions} from '../../components/extensions';
 import {App} from '../../public/app';
 import {PerfettoPlugin} from '../../public/plugin';
 import {Trace} from '../../public/trace';
 import {SqlModules, isTableEffectivelyDisabled} from './sql_modules';
+import {extensions} from '../../components/extensions';
 import {
-  SQL_MODULES_DOCS_SCHEMA,
-  SqlModulesDocsSchema,
-  SqlModulesImpl,
-} from './sql_modules_impl';
+  STDLIB_METADATA_SCHEMA,
+  StdlibMetadata,
+  loadSqlModulesFromTp,
+} from './sql_modules_from_tp';
 
-const docs = defer<SqlModulesDocsSchema>();
+// Metadata JSON is small and static — fetch it once when the app starts so
+// it is ready by the time a trace loads.
+let metadataPromise: Promise<StdlibMetadata> | undefined;
+
+function getMetadata(): Promise<StdlibMetadata> {
+  if (metadataPromise === undefined) {
+    metadataPromise = fetch(assetSrc('stdlib_docs.json'))
+      .then((r) => r.json())
+      .then((json) => STDLIB_METADATA_SCHEMA.parse(json));
+  }
+  return metadataPromise;
+}
 
 export default class SqlModulesPlugin implements PerfettoPlugin {
   static readonly id = 'dev.perfetto.SqlModules';
@@ -34,18 +44,14 @@
   private sqlModules: SqlModules | undefined;
 
   static onActivate(_: App): void {
-    // Load the SQL modules JSON file when the plugin when the app starts up,
-    // rather than waiting until trace load.
-    loadJson().then(docs.resolve.bind(docs));
+    // Kick off the metadata fetch early so it is ready before trace load.
+    getMetadata();
   }
 
   async onTraceLoad(trace: Trace): Promise<void> {
-    docs.then(async (resolvedDocs) => {
-      const impl = new SqlModulesImpl(trace, resolvedDocs);
-      // Don't initialize immediately - let consumers trigger it when needed
-      this.sqlModules = impl;
-      m.redraw();
-    });
+    const metadata = await getMetadata();
+    this.sqlModules = await loadSqlModulesFromTp(trace, metadata);
+    m.redraw();
 
     trace.commands.registerCommand({
       id: 'dev.perfetto.OpenSqlModulesTable',
@@ -58,7 +64,6 @@
 
         const tables = this.sqlModules.listTablesNames();
 
-        // Annotate disabled tables in the prompt
         const annotatedTables = tables.map((tableName) => {
           if (isTableEffectivelyDisabled(this.sqlModules!, tableName)) {
             return `${tableName} (no data)`;
@@ -74,14 +79,12 @@
           return;
         }
 
-        // Strip the annotation if present
         const actualTableName = chosenTable.replace(' (no data)', '');
         const module = this.sqlModules.getModuleForTable(actualTableName);
         if (module === undefined) {
           return;
         }
 
-        // Warn if opening a disabled table
         if (isTableEffectivelyDisabled(this.sqlModules, actualTableName)) {
           const proceed = window.confirm(
             `Warning: The table "${actualTableName}" may not have data in this trace. ` +
@@ -112,9 +115,3 @@
     return Promise.resolve();
   }
 }
-
-async function loadJson() {
-  const x = await fetch(assetSrc('stdlib_docs.json'));
-  const json = await x.json();
-  return SQL_MODULES_DOCS_SCHEMA.parse(json);
-}
diff --git a/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_from_tp.ts b/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_from_tp.ts
new file mode 100644
index 0000000..6a18038
--- /dev/null
+++ b/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_from_tp.ts
@@ -0,0 +1,248 @@
+// Copyright (C) 2025 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {z} from 'zod';
+import {LONG, STR} from '../../trace_processor/query_result';
+import {Trace} from '../../public/trace';
+import {getOrCreate} from '../../base/utils';
+import {SqlModules} from './sql_modules';
+import {
+  SQL_MODULES_DOCS_SCHEMA,
+  SqlModulesImpl,
+  ARG_OR_COL_SCHEMA,
+  DATA_OBJECT_SCHEMA,
+  FUNCTION_SCHEMA,
+  TABLE_FUNCTION_SCHEMA,
+  MACRO_SCHEMA,
+} from './sql_modules_impl';
+
+// Schema for the metadata-only JSON generated by gen_stdlib_docs_json.py.
+// Contains only what the TP table functions don't expose: tags, includes,
+// data availability checks, and table importance.
+const TABLE_META_SCHEMA = z.object({
+  importance: z.enum(['core', 'high', 'mid', 'low']).nullish(),
+  data_check_sql: z.string().nullish(),
+});
+
+const MODULE_META_SCHEMA = z.object({
+  tags: z.array(z.string()),
+  includes: z.array(z.string()),
+  data_check_sql: z.string().nullish(),
+  tables: z.record(z.string(), TABLE_META_SCHEMA).optional(),
+});
+
+export const STDLIB_METADATA_SCHEMA = z.record(z.string(), MODULE_META_SCHEMA);
+export type StdlibMetadata = z.infer<typeof STDLIB_METADATA_SCHEMA>;
+
+// Types derived from the schema so any field-name mismatch is a compile error.
+type ArgOrCol = z.infer<typeof ARG_OR_COL_SCHEMA>;
+type DataObject = z.infer<typeof DATA_OBJECT_SCHEMA>;
+type FnObject = z.infer<typeof FUNCTION_SCHEMA>;
+type TblFnObject = z.infer<typeof TABLE_FUNCTION_SCHEMA>;
+type MacroObject = z.infer<typeof MACRO_SCHEMA>;
+
+// Schema for a single serialized entry in the cols/args JSON blobs from TP.
+const RAW_ENTRY_SCHEMA = z.object({
+  name: z.string(),
+  type: z.string(),
+  description: z.string(),
+});
+type RawEntry = z.infer<typeof RAW_ENTRY_SCHEMA>;
+
+function parseEntries(json: string): RawEntry[] {
+  try {
+    const parsed: unknown = JSON.parse(json);
+    if (!Array.isArray(parsed)) return [];
+    return parsed.flatMap((e) => {
+      const r = RAW_ENTRY_SCHEMA.safeParse(e);
+      return r.success ? [r.data] : [];
+    });
+  } catch {
+    return [];
+  }
+}
+
+function toArgOrCol(e: RawEntry, tableName: string | null = null): ArgOrCol {
+  return {
+    name: e.name,
+    type: e.type,
+    desc: e.description,
+    table: tableName,
+    column: null,
+  };
+}
+
+function summaryDesc(desc: string): string {
+  const trimmed = desc.replace(/\n/g, ' ');
+  const dot = trimmed.indexOf('. ');
+  if (dot !== -1) return trimmed.slice(0, dot);
+  const firstDot = trimmed.indexOf('.');
+  if (firstDot !== -1) return trimmed.slice(0, firstDot);
+  return trimmed;
+}
+
+// Queries the TP's __intrinsic_stdlib_* table functions and merges with the
+// pre-generated metadata JSON to build a SqlModules object.
+// Uses 4 parallel queries (one per entity type) instead of per-module queries.
+export async function loadSqlModulesFromTp(
+  trace: Trace,
+  metadata: StdlibMetadata,
+): Promise<SqlModules> {
+  const engine = trace.engine;
+
+  const [modsResult, tablesResult, fnsResult, macrosResult] = await Promise.all(
+    [
+      engine.query('SELECT module, package FROM __intrinsic_stdlib_modules()'),
+      engine.query(
+        `SELECT module, name, type, description, exposed, cols
+           FROM __intrinsic_stdlib_tables('*')`,
+      ),
+      engine.query(
+        `SELECT module, name, description, exposed, is_table_function,
+                return_type, return_description, args, cols
+           FROM __intrinsic_stdlib_functions('*')`,
+      ),
+      engine.query(
+        `SELECT module, name, description, exposed,
+                return_type, return_description, args
+           FROM __intrinsic_stdlib_macros('*')`,
+      ),
+    ],
+  );
+
+  // Build package → [module] map.
+  const packageMap = new Map<string, string[]>();
+  const modsIter = modsResult.iter({module: STR, package: STR});
+  for (; modsIter.valid(); modsIter.next()) {
+    getOrCreate(packageMap, modsIter.package, () => []).push(modsIter.module);
+  }
+
+  // Group tables by module.
+  const tablesByModule = new Map<string, DataObject[]>();
+  const tIter = tablesResult.iter({
+    module: STR,
+    name: STR,
+    type: STR,
+    description: STR,
+    exposed: LONG,
+    cols: STR,
+  });
+  for (; tIter.valid(); tIter.next()) {
+    if (!tIter.exposed) continue;
+    const modKey = tIter.module;
+    const tableMeta = metadata[modKey]?.tables?.[tIter.name];
+    const desc = tIter.description;
+    getOrCreate(tablesByModule, modKey, () => []).push({
+      name: tIter.name,
+      desc,
+      summary_desc: summaryDesc(desc),
+      type: tIter.type,
+      importance: tableMeta?.importance ?? null,
+      data_check_sql: tableMeta?.data_check_sql ?? null,
+      cols: parseEntries(tIter.cols).map((e) => toArgOrCol(e, tIter.name)),
+    });
+  }
+
+  // Group functions and table functions by module.
+  const fnsByModule = new Map<string, FnObject[]>();
+  const tblFnsByModule = new Map<string, TblFnObject[]>();
+  const fIter = fnsResult.iter({
+    module: STR,
+    name: STR,
+    description: STR,
+    exposed: LONG,
+    is_table_function: LONG,
+    return_type: STR,
+    return_description: STR,
+    args: STR,
+    cols: STR,
+  });
+  for (; fIter.valid(); fIter.next()) {
+    if (!fIter.exposed) continue;
+    const modKey = fIter.module;
+    const desc = fIter.description;
+    const args = parseEntries(fIter.args).map((e) => toArgOrCol(e));
+    if (fIter.is_table_function) {
+      getOrCreate(tblFnsByModule, modKey, () => []).push({
+        name: fIter.name,
+        desc,
+        summary_desc: summaryDesc(desc),
+        args,
+        cols: parseEntries(fIter.cols).map((e) => toArgOrCol(e)),
+      });
+    } else {
+      getOrCreate(fnsByModule, modKey, () => []).push({
+        name: fIter.name,
+        desc,
+        summary_desc: summaryDesc(desc),
+        return_type: fIter.return_type,
+        return_desc: fIter.return_description,
+        args,
+      });
+    }
+  }
+
+  // Group macros by module.
+  const macrosByModule = new Map<string, MacroObject[]>();
+  const mIter = macrosResult.iter({
+    module: STR,
+    name: STR,
+    description: STR,
+    exposed: LONG,
+    return_type: STR,
+    return_description: STR,
+    args: STR,
+  });
+  for (; mIter.valid(); mIter.next()) {
+    if (!mIter.exposed) continue;
+    const modKey = mIter.module;
+    const desc = mIter.description;
+    getOrCreate(macrosByModule, modKey, () => []).push({
+      name: mIter.name,
+      desc,
+      summary_desc: summaryDesc(desc),
+      return_type: mIter.return_type,
+      return_desc: mIter.return_description,
+      args: parseEntries(mIter.args).map((e) => toArgOrCol(e)),
+    });
+  }
+
+  // Assemble the final docs structure.
+  const docs = [];
+  for (const [pkgName, moduleKeys] of packageMap) {
+    const pkgModules = [];
+    for (const modKey of moduleKeys) {
+      const meta = metadata[modKey];
+      pkgModules.push({
+        module_name: modKey,
+        tags: meta?.tags ?? [],
+        includes: meta?.includes ?? [],
+        data_check_sql: meta?.data_check_sql ?? null,
+        data_objects: tablesByModule.get(modKey) ?? [],
+        functions: fnsByModule.get(modKey) ?? [],
+        table_functions: tblFnsByModule.get(modKey) ?? [],
+        macros: macrosByModule.get(modKey) ?? [],
+      });
+    }
+    docs.push({name: pkgName, modules: pkgModules});
+  }
+
+  const parsed = SQL_MODULES_DOCS_SCHEMA.safeParse(docs);
+  if (!parsed.success) {
+    throw new Error(
+      `Failed to parse stdlib docs from TP: ${parsed.error.message}`,
+    );
+  }
+  return new SqlModulesImpl(trace, parsed.data);
+}
diff --git a/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_impl.ts b/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_impl.ts
index 3649e87..9ad0bfb 100644
--- a/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_impl.ts
+++ b/ui/src/plugins/dev.perfetto.SqlModules/sql_modules_impl.ts
@@ -462,7 +462,7 @@
   }
 }
 
-const ARG_OR_COL_SCHEMA = z.object({
+export const ARG_OR_COL_SCHEMA = z.object({
   name: z.string(),
   type: z.string(),
   desc: z.string(),
@@ -471,7 +471,7 @@
 });
 type DocsArgOrColSchemaType = z.infer<typeof ARG_OR_COL_SCHEMA>;
 
-const DATA_OBJECT_SCHEMA = z.object({
+export const DATA_OBJECT_SCHEMA = z.object({
   name: z.string(),
   desc: z.string(),
   summary_desc: z.string(),
@@ -482,7 +482,7 @@
 });
 type DocsDataObjectSchemaType = z.infer<typeof DATA_OBJECT_SCHEMA>;
 
-const FUNCTION_SCHEMA = z.object({
+export const FUNCTION_SCHEMA = z.object({
   name: z.string(),
   desc: z.string(),
   summary_desc: z.string(),
@@ -492,7 +492,7 @@
 });
 type DocsFunctionSchemaType = z.infer<typeof FUNCTION_SCHEMA>;
 
-const TABLE_FUNCTION_SCHEMA = z.object({
+export const TABLE_FUNCTION_SCHEMA = z.object({
   name: z.string(),
   desc: z.string(),
   summary_desc: z.string(),
@@ -501,7 +501,7 @@
 });
 type DocsTableFunctionSchemaType = z.infer<typeof TABLE_FUNCTION_SCHEMA>;
 
-const MACRO_SCHEMA = z.object({
+export const MACRO_SCHEMA = z.object({
   name: z.string(),
   desc: z.string(),
   summary_desc: z.string(),