Add a presubmit check that function names are snake_case

Represent the output of stdlib docs parsing with typed classes
(ParsedFile, TableOrView, Function, TableFunction) instead of raw
dicts, relax the name patterns to accept either case, and report an
error when a CREATE PERFETTO FUNCTION name is not snake_case. Also
migrate stdlib CREATE_FUNCTION and LIKE usages to CREATE PERFETTO
FUNCTION and GLOB, and lower-case existing function names.

R=lalitm@google.com

Change-Id: Ic55ba07d4307cd97d3d854f79b41c2927b81ce34
diff --git a/python/generators/stdlib_docs/parse.py b/python/generators/stdlib_docs/parse.py
index 0bb4c28..26b3c49 100644
--- a/python/generators/stdlib_docs/parse.py
+++ b/python/generators/stdlib_docs/parse.py
@@ -27,6 +27,15 @@
 from python.generators.stdlib_docs.utils import COLUMN_ANNOTATION_PATTERN
 
 
+def is_internal(name: str) -> bool:
+  """Returns true for names with the (case-insensitive) internal_ prefix."""
+  return re.match(r'^internal_.*', name, re.IGNORECASE) is not None
+
+
+def is_snake_case(s: str) -> bool:
+  """Returns true if the string is snake_case."""
+  return re.fullmatch(r'^[a-z_0-9]*$', s) is not None
+
+
 class AbstractDocParser(ABC):
 
   @dataclass
@@ -45,7 +54,7 @@
     module_pattern = f"^{self.module}_.*"
     if upper:
       module_pattern = module_pattern.upper()
-    starts_with_module_name = re.match(module_pattern, self.name)
+    starts_with_module_name = re.match(module_pattern, self.name, re.IGNORECASE)
     if self.module == "common":
       if starts_with_module_name:
         self._error('Names of tables/views/functions in the "common" module '
@@ -155,27 +164,54 @@
         f'{error}')
 
 
+class TableOrView:
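+  """Parsed docs for a CREATE TABLE or CREATE VIEW statement."""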
+  name: str
+  type: str
+  desc: str
+  cols: Dict[str, str]
+
+  def __init__(self, name, type, desc, cols):
+    self.name = name
+    self.type = type
+    self.desc = desc
+    self.cols = cols
+
+
 class TableViewDocParser(AbstractDocParser):
   """Parses documentation for CREATE TABLE and CREATE VIEW statements."""
 
   def __init__(self, path: str, module: str):
     super().__init__(path, module)
 
-  def parse(self, doc: DocsExtractor.Extract) -> Optional[Dict[str, Any]]:
+  def parse(self, doc: DocsExtractor.Extract) -> Optional[TableOrView]:
     assert doc.obj_kind == ObjKind.table_view
 
-    # Ignore internal tables and views.
     self.name = doc.obj_match[1]
-    if re.match(r'^internal_.*', self.name):
+    if is_internal(self.name):
       return None
 
     self._validate_only_contains_annotations(doc.annotations, {'@column'})
-    return {
-        'name': self._parse_name(),
-        'type': doc.obj_match[0],
-        'desc': self._parse_desc_not_empty(doc.description),
-        'cols': self._parse_columns(doc.annotations, ''),
-    }
+    return TableOrView(
+        name=self._parse_name(),
+        type=doc.obj_match[0],
+        desc=self._parse_desc_not_empty(doc.description),
+        cols=self._parse_columns(doc.annotations, ''),
+    )
+
+
+class Function:
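+  """Parsed docs for a CREATE PERFETTO FUNCTION statement."""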
+  name: str
+  desc: str
+  args: Dict[str, Any]
+  return_type: str
+  return_desc: str
+
+  def __init__(self, name, desc, args, return_type, return_desc):
+    self.name = name
+    self.desc = desc
+    self.args = args
+    self.return_type = return_type
+    self.return_desc = return_desc
 
 
 class FunctionDocParser(AbstractDocParser):
@@ -184,23 +220,42 @@
   def __init__(self, path: str, module: str):
     super().__init__(path, module)
 
-  def parse(self, doc: DocsExtractor.Extract) -> Optional[Dict[str, Any]]:
+  def parse(self, doc: DocsExtractor.Extract) -> Optional[Function]:
     self.name, args, ret, _ = doc.obj_match
 
     # Ignore internal functions.
-    if re.match(r'^INTERNAL_.*', self.name):
+    if is_internal(self.name):
       return None
 
     self._validate_only_contains_annotations(doc.annotations, {'@arg', '@ret'})
 
     ret_type, ret_desc = self._parse_ret(doc.annotations, ret)
-    return {
-        'name': self._parse_name(upper=True),
-        'desc': self._parse_desc_not_empty(doc.description),
-        'args': self._parse_args(doc.annotations, args),
-        'return_type': ret_type,
-        'return_desc': ret_desc,
-    }
+    name = self._parse_name(upper=True)
+
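+    # Enforce the naming convention for new-style functions: CREATE PERFETTO
+    # FUNCTION names must be snake_case.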
+    if not is_snake_case(name):
+      self._error('Function name %s is not snake_case (should be %s).' %
+                  (name, name.casefold()))
+
+    return Function(
+        name=name,
+        desc=self._parse_desc_not_empty(doc.description),
+        args=self._parse_args(doc.annotations, args),
+        return_type=ret_type,
+        return_desc=ret_desc,
+    )
+
+
+class TableFunction:
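+  """Parsed docs for a CREATE_VIEW_FUNCTION (table function) statement."""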
+  name: str
+  desc: str
+  cols: Dict[str, str]
+  args: Dict[str, Any]
+
+  def __init__(self, name, desc, cols, args):
+    self.name = name
+    self.desc = desc
+    self.cols = cols
+    self.args = args
 
 
 class ViewFunctionDocParser(AbstractDocParser):
@@ -209,26 +264,39 @@
   def __init__(self, path: str, module: str):
     super().__init__(path, module)
 
-  def parse(self, doc: DocsExtractor.Extract) -> Optional[Dict[str, Any]]:
+  def parse(self, doc: DocsExtractor.Extract) -> Optional[TableFunction]:
     self.name, args, columns, _ = doc.obj_match
 
     # Ignore internal functions.
-    if re.match(r'^INTERNAL_.*', self.name):
+    if is_internal(self.name):
       return None
 
     self._validate_only_contains_annotations(doc.annotations,
                                              {'@arg', '@column'})
-    return {
-        'name': self._parse_name(upper=True),
-        'desc': self._parse_desc_not_empty(doc.description),
-        'cols': self._parse_columns(doc.annotations, columns),
-        'args': self._parse_args(doc.annotations, args),
-    }
+    return TableFunction(
+        name=self._parse_name(upper=True),
+        desc=self._parse_desc_not_empty(doc.description),
+        cols=self._parse_columns(doc.annotations, columns),
+        args=self._parse_args(doc.annotations, args),
+    )
+
+
+class ParsedFile:
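+  """Parsed docs and validation errors for a single stdlib SQL file."""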
+  errors: List[str]
+  table_views: List[TableOrView]
+  functions: List[Function]
+  table_functions: List[TableFunction]
+
+  def __init__(self, errors, table_views, functions, table_functions):
+    self.errors = errors
+    self.table_views = table_views
+    self.functions = functions
+    self.table_functions = table_functions
 
 
-# Reads the provided SQL and, if possible, generates a dictionary with data
-# from documentation together with errors from validation of the schema.
+# Reads the provided SQL and returns a ParsedFile with the documentation data
+# together with any errors from validation of the schema.
-def parse_file_to_dict(path: str, sql: str) -> Union[Dict[str, Any], List[str]]:
+def parse_file(path: str, sql: str) -> ParsedFile:
   if sys.platform.startswith('win'):
     path = path.replace('\\', '/')
 
@@ -239,7 +307,7 @@
   extractor = DocsExtractor(path, module_name, sql)
   docs = extractor.extract()
   if extractor.errors:
-    return extractor.errors
+    return ParsedFile(extractor.errors, [], [], [])
 
   # Parse the extracted docs.
   errors = []
@@ -266,8 +334,4 @@
         view_functions.append(res)
       errors += parser.errors
 
-  return errors if errors else {
-      'imports': table_views,
-      'functions': functions,
-      'view_functions': view_functions
-  }
+  return ParsedFile(errors, table_views, functions, view_functions)
diff --git a/python/generators/stdlib_docs/utils.py b/python/generators/stdlib_docs/utils.py
index 52ddfac..bc01c32 100644
--- a/python/generators/stdlib_docs/utils.py
+++ b/python/generators/stdlib_docs/utils.py
@@ -16,8 +16,7 @@
 import re
 from typing import Dict, List
 
-LOWER_NAME = r'[a-z_\d]+'
-UPPER_NAME = r'[A-Z_\d]+'
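+# A single pattern accepts names in either case; snake_case for new function
+# names is enforced separately in parse.py.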
+NAME = r'[a-zA-Z_\d]+'
 ANY_WORDS = r'[^\s].*'
 ANY_NON_QUOTE = r'[^\']*.*'
 TYPE = r'[A-Z]+'
@@ -28,11 +27,11 @@
     # Match create table/view and catch type
     fr'CREATE{WS}(?:VIRTUAL )?{WS}(TABLE|VIEW){WS}(?:IF NOT EXISTS)?{WS}'
     # Catch the name
-    fr'{WS}({LOWER_NAME}){WS}(?:AS|USING)?{WS}.*')
+    fr'{WS}({NAME}){WS}(?:AS|USING)?{WS}.*')
 
 CREATE_FUNCTION_PATTERN = (
-    # Function name: we are matching everything [A-Z]* between ' and ).
+    # Function name: matches NAME up to the opening bracket.
-    fr"CREATE{WS}PERFETTO{WS}FUNCTION{WS}({UPPER_NAME}){WS}"
+    fr"CREATE{WS}PERFETTO{WS}FUNCTION{WS}({NAME}){WS}"
     # Args: anything before closing bracket.
     fr"{WS}\({WS}({ANY_WORDS}){WS}\){WS}"
     # Type: [A-Z]* between two '.
@@ -43,7 +42,7 @@
 CREATE_VIEW_FUNCTION_PATTERN = (
     fr"SELECT{WS}CREATE_VIEW_FUNCTION\({WS}"
-    # Function name: we are matching everything [A-Z]* between ' and ).
+    # Function name: matches NAME between ' and (.
-    fr"{WS}'{WS}({UPPER_NAME}){WS}\({WS}"
+    fr"{WS}'{WS}({NAME}){WS}\({WS}"
     # Args: anything before closing bracket with '.
     fr"{WS}({ANY_WORDS}){WS}\){WS}'{WS},{WS}"
     # Return columns: anything between two '.
@@ -64,9 +63,9 @@
     ObjKind.view_function: CREATE_VIEW_FUNCTION_PATTERN,
 }
 
-COLUMN_ANNOTATION_PATTERN = fr'^\s*({LOWER_NAME})\s*({ANY_WORDS})'
+COLUMN_ANNOTATION_PATTERN = fr'^\s*({NAME})\s*({ANY_WORDS})'
 
-NAME_AND_TYPE_PATTERN = fr'\s*({LOWER_NAME})\s+({TYPE})\s*'
+NAME_AND_TYPE_PATTERN = fr'\s*({NAME})\s+({TYPE})\s*'
 
 ARG_ANNOTATION_PATTERN = fr'\s*{NAME_AND_TYPE_PATTERN}\s+({ANY_WORDS})'
 
diff --git a/python/test/stdlib_unittest.py b/python/test/stdlib_unittest.py
index 649cf4d..821087b 100644
--- a/python/test/stdlib_unittest.py
+++ b/python/test/stdlib_unittest.py
@@ -20,7 +20,7 @@
     os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(os.path.join(ROOT_DIR))
 
-from python.generators.stdlib_docs.parse import parse_file_to_dict
+from python.generators.stdlib_docs.parse import parse_file
 
 DESC = """--
 -- First line.
@@ -49,42 +49,42 @@
 class TestStdlib(unittest.TestCase):
 
   def test_valid_table(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 {COLS_STR}
 CREATE TABLE foo_table AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, dict)
+    self.assertListEqual(res.errors, [])
 
-    table = res['imports'][0]
-    self.assertEqual(table['name'], 'foo_table')
-    self.assertEqual(table['desc'], 'First line. Second line.')
-    self.assertEqual(table['type'], 'TABLE')
-    self.assertEqual(table['cols'], {
+    table = res.table_views[0]
+    self.assertEqual(table.name, 'foo_table')
+    self.assertEqual(table.desc, 'First line. Second line.')
+    self.assertEqual(table.type, 'TABLE')
+    self.assertEqual(table.cols, {
         'slice_id': 'Id of slice.',
         'slice_name': 'Name of slice.'
     })
 
   def test_valid_function(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 {ARGS_STR}
 {RET_STR}
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, dict)
+    self.assertListEqual(res.errors, [])
 
-    fn = res['functions'][0]
-    self.assertEqual(fn['name'], 'FOO_FN')
-    self.assertEqual(fn['desc'], 'First line. Second line.')
+    fn = res.functions[0]
+    self.assertEqual(fn.name, 'foo_fn')
+    self.assertEqual(fn.desc, 'First line. Second line.')
     self.assertEqual(
-        fn['args'], {
+        fn.args, {
             'utid': {
                 'type': 'INT',
                 'desc': 'Utid of thread.',
@@ -94,11 +94,11 @@
                 'desc': 'String name.',
             },
         })
-    self.assertEqual(fn['return_type'], 'BOOL')
-    self.assertEqual(fn['return_desc'], 'Exists.')
+    self.assertEqual(fn.return_type, 'BOOL')
+    self.assertEqual(fn.return_desc, 'Exists.')
 
   def test_valid_view_function(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 {ARGS_STR}
@@ -109,13 +109,13 @@
   '{SQL_STR}'
 );
     '''.strip())
-    assert isinstance(res, dict)
+    self.assertListEqual(res.errors, [])
 
-    fn = res['view_functions'][0]
-    self.assertEqual(fn['name'], 'FOO_VIEW_FN')
-    self.assertEqual(fn['desc'], 'First line. Second line.')
+    fn = res.table_functions[0]
+    self.assertEqual(fn.name, 'FOO_VIEW_FN')
+    self.assertEqual(fn.desc, 'First line. Second line.')
     self.assertEqual(
-        fn['args'], {
+        fn.args, {
             'utid': {
                 'type': 'INT',
                 'desc': 'Utid of thread.',
@@ -125,33 +125,35 @@
                 'desc': 'String name.',
             },
         })
-    self.assertEqual(fn['cols'], {
+    self.assertEqual(fn.cols, {
         'slice_id': 'Id of slice.',
         'slice_name': 'Name of slice.'
     })
 
   def test_missing_module_name(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 {COLS_STR}
 CREATE TABLE bar_table AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting an error: table name (bar_table) does not start with the
+    # module name (foo).
+    self.assertEqual(len(res.errors), 1)
 
   def test_common_does_not_include_module_name(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'common/bar.sql', f'''
 {DESC}
 {COLS_STR}
 CREATE TABLE common_table AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting an error: tables in the common module should not start with
+    # the 'common_' prefix.
+    self.assertEqual(len(res.errors), 1)
 
   def test_cols_typo(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 --
@@ -160,10 +162,11 @@
 CREATE TABLE bar_table AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting an error: column slice_id2 not found in the table.
+    self.assertEqual(len(res.errors), 1)
 
   def test_cols_no_desc(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 --
@@ -172,54 +175,62 @@
 CREATE TABLE bar_table AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting an error: column slice_id is missing a description.
+    self.assertEqual(len(res.errors), 1)
 
   def test_args_typo(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 --
--- @arg utid2 INT              Uint.
+-- @arg utid2 INT             Uint.
 -- @arg name STRING           String name.
 {RET_STR}
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting 2 errors:
+    # - arg utid2 not found in the function (should be utid);
+    # - utid not documented.
+    self.assertEqual(len(res.errors), 2)
 
   def test_args_no_desc(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 --
 -- @arg utid INT
 -- @arg name STRING           String name.
 {RET_STR}
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting 2 errors:
+    # - arg utid is missing a description;
+    # - arg utid is not documented.
+    self.assertEqual(len(res.errors), 2)
 
   def test_ret_no_desc(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 {ARGS_STR}
 --
 -- @ret BOOL
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, list)
+    # Expecting an error: return value is missing a description.
+    self.assertEqual(len(res.errors), 1)
 
   def test_multiline_desc(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 -- This
 -- is
@@ -232,18 +243,18 @@
 -- description.
 {ARGS_STR}
 {RET_STR}
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, dict)
+    self.assertListEqual(res.errors, [])
 
-    fn = res['functions'][0]
-    self.assertEqual(fn['desc'], 'This is a very long description.')
+    fn = res.functions[0]
+    self.assertEqual(fn.desc, 'This is a very long description.')
 
   def test_multiline_arg_desc(self):
-    res = parse_file_to_dict(
+    res = parse_file(
         'foo/bar.sql', f'''
 {DESC}
 --
@@ -255,16 +266,15 @@
 --                             which spans across multiple lines
 -- inconsistently.
 {RET_STR}
-CREATE PERFETTO FUNCTION FOO_FN({ARGS_SQL_STR})
+CREATE PERFETTO FUNCTION foo_fn({ARGS_SQL_STR})
 RETURNS {RET_SQL_STR}
 AS
 {SQL_STR};
     '''.strip())
-    assert isinstance(res, dict)
+    self.assertListEqual(res.errors, [])
 
-    fn = res['functions'][0]
+    fn = res.functions[0]
     self.assertEqual(
-        fn['args'], {
+        fn.args, {
             'utid': {
                 'type': 'INT',
                 'desc': 'Uint spread across lines.',
@@ -275,3 +285,17 @@
                         'inconsistently.',
             },
         })
+
+  def test_function_name_style(self):
+    res = parse_file(
+        'foo/bar.sql', f'''
+{DESC}
+{ARGS_STR}
+{RET_STR}
+CREATE PERFETTO FUNCTION foo_SnakeCase({ARGS_SQL_STR})
+RETURNS {RET_SQL_STR}
+AS
+{SQL_STR};
+    '''.strip())
+    # Expecting an error: the function name is not snake_case.
+    self.assertEqual(len(res.errors), 1)
\ No newline at end of file
diff --git a/src/trace_processor/importers/ftrace/ftrace_parser.cc b/src/trace_processor/importers/ftrace/ftrace_parser.cc
index 9773b44..2f0be1f 100644
--- a/src/trace_processor/importers/ftrace/ftrace_parser.cc
+++ b/src/trace_processor/importers/ftrace/ftrace_parser.cc
@@ -1746,7 +1746,7 @@
     auto new_utid = proc_tracker->GetOrCreateThread(new_tid);
 
     ThreadStateTracker::GetOrCreate(context_)->PushNewTaskEvent(
-      timestamp, new_utid, source_utid);
+        timestamp, new_utid, source_utid);
     return;
   }
 
diff --git a/src/trace_processor/importers/ftrace/thread_state_tracker.cc b/src/trace_processor/importers/ftrace/thread_state_tracker.cc
index 5e76514..7eda59e 100644
--- a/src/trace_processor/importers/ftrace/thread_state_tracker.cc
+++ b/src/trace_processor/importers/ftrace/thread_state_tracker.cc
@@ -53,11 +53,11 @@
                                          UniqueTid utid,
                                          UniqueTid waker_utid,
                                          std::optional<uint16_t> common_flags) {
-  // If thread has not had a sched switch event, just open a runnable state. There's no
-  // pending state to close.
+  // If thread has not had a sched switch event, just open a runnable state.
+  // There's no pending state to close.
   if (!HasPreviousRowNumbersForUtid(utid)) {
-      AddOpenState(event_ts, utid, runnable_string_id_, std::nullopt, waker_utid,
-               common_flags);
+    AddOpenState(event_ts, utid, runnable_string_id_, std::nullopt, waker_utid,
+                 common_flags);
     return;
   }
 
diff --git a/src/trace_processor/importers/ftrace/thread_state_tracker_unittest.cc b/src/trace_processor/importers/ftrace/thread_state_tracker_unittest.cc
index b3bc965..1b0e1a3 100644
--- a/src/trace_processor/importers/ftrace/thread_state_tracker_unittest.cc
+++ b/src/trace_processor/importers/ftrace/thread_state_tracker_unittest.cc
@@ -193,16 +193,16 @@
   tracker_->PushSchedSwitchEvent(7, CPU_A, 0, StringIdOf(kRunnable), 18);
 
   auto rows_it = ThreadStateIterator();
-  VerifyThreadState(rows_it, 1, 2, 8, kRunnable,
-                    std::nullopt, std::nullopt, 11);
+  VerifyThreadState(rows_it, 1, 2, 8, kRunnable, std::nullopt, std::nullopt,
+                    11);
   VerifyThreadState(++rows_it, 2, 3, 8, kRunning);
   VerifyThreadState(++rows_it, 2, std::nullopt, 11, "S");
   VerifyThreadState(++rows_it, 3, std::nullopt, 8, "S");
   VerifyThreadState(++rows_it, 4, std::nullopt, 17771, "S");
   VerifyThreadState(++rows_it, 4, 5, 17772, kRunning);
   VerifyThreadState(++rows_it, 5, std::nullopt, 17772, "S");
-  VerifyThreadState(++rows_it, 6, 7, 18, kRunnable,
-                    std::nullopt, std::nullopt, 0);
+  VerifyThreadState(++rows_it, 6, 7, 18, kRunnable, std::nullopt, std::nullopt,
+                    0);
   VerifyThreadState(++rows_it, 7, std::nullopt, 18, kRunning);
 }
 
diff --git a/src/trace_processor/metrics/sql/android/BUILD.gn b/src/trace_processor/metrics/sql/android/BUILD.gn
index 444fd95..28eb9d0 100644
--- a/src/trace_processor/metrics/sql/android/BUILD.gn
+++ b/src/trace_processor/metrics/sql/android/BUILD.gn
@@ -60,11 +60,11 @@
     "android_task_names.sql",
     "android_trace_quality.sql",
     "android_trusty_workqueues.sql",
+    "codec_metrics.sql",
     "composer_execution.sql",
     "composition_layers.sql",
     "counter_span_view_merged.sql",
     "cpu_info.sql",
-    "codec_metrics.sql",
     "display_metrics.sql",
     "frame_missed.sql",
     "g2d.sql",
diff --git a/src/trace_processor/perfetto_sql/stdlib/android/io.sql b/src/trace_processor/perfetto_sql/stdlib/android/io.sql
index 91271be..2cdf1b9 100644
--- a/src/trace_processor/perfetto_sql/stdlib/android/io.sql
+++ b/src/trace_processor/perfetto_sql/stdlib/android/io.sql
@@ -33,7 +33,7 @@
   AVG(counter.value) AS avg
 FROM counter
 JOIN counter_track
-  ON counter_track.id = counter.track_id AND counter_track.name LIKE '%f2fs%'
+  ON counter_track.id = counter.track_id AND counter_track.name GLOB '*f2fs*'
 GROUP BY name
 ORDER BY sum DESC;
 
@@ -59,7 +59,7 @@
       EXTRACT_ARG(arg_set_id, 'ino') AS ino,
       EXTRACT_ARG(arg_set_id, 'copied') AS copied
     FROM raw
-    WHERE name LIKE 'f2fs_write_end%'
+    WHERE name GLOB 'f2fs_write_end*'
   )
 SELECT
   thread.utid,
diff --git a/src/trace_processor/perfetto_sql/stdlib/android/thread.sql b/src/trace_processor/perfetto_sql/stdlib/android/thread.sql
index bd1e416..3d6fa46 100644
--- a/src/trace_processor/perfetto_sql/stdlib/android/thread.sql
+++ b/src/trace_processor/perfetto_sql/stdlib/android/thread.sql
@@ -13,13 +13,9 @@
 -- See the License for the specific language governing permissions and
 -- limitations under the License.
 
-SELECT CREATE_FUNCTION(
-    'INTERNAL_THREAD_PREFIX(thread_name STRING)',
-    'STRING',
-    '
-    SELECT STR_SPLIT(STR_SPLIT(STR_SPLIT(STR_SPLIT($thread_name, "-", 0), "[", 0), ":", 0), " ", 0);
-    '
-);
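+-- Returns the prefix of a thread name: everything before the first '-', '[',
+-- ':' or ' '.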
+CREATE PERFETTO FUNCTION internal_thread_prefix(thread_name STRING)
+RETURNS STRING AS
+SELECT STR_SPLIT(STR_SPLIT(STR_SPLIT(STR_SPLIT($thread_name, "-", 0), "[", 0), ":", 0), " ", 0);
 
 -- Per process stats of threads created in a process
 --
diff --git a/src/trace_processor/perfetto_sql/stdlib/common/args.sql b/src/trace_processor/perfetto_sql/stdlib/common/args.sql
index 4835bf8..7361e93 100644
--- a/src/trace_processor/perfetto_sql/stdlib/common/args.sql
+++ b/src/trace_processor/perfetto_sql/stdlib/common/args.sql
@@ -35,11 +35,8 @@
 -- @arg arg_set_id INT  Id of the arg set.
 -- @arg key STRING      Key of the argument.
 -- @ret STRING          Formatted value of the argument.
-SELECT CREATE_FUNCTION(
-'FORMATTED_ARG(arg_set_id INT, key STRING)',
-'STRING',
-'
+CREATE PERFETTO FUNCTION formatted_arg(arg_set_id INT, key STRING)
+RETURNS STRING AS
 SELECT display_value
 FROM args
-WHERE arg_set_id = $arg_set_id AND key = $key
-');
\ No newline at end of file
+WHERE arg_set_id = $arg_set_id AND key = $key;
\ No newline at end of file
diff --git a/src/trace_processor/perfetto_sql/stdlib/experimental/thread_executing_span.sql b/src/trace_processor/perfetto_sql/stdlib/experimental/thread_executing_span.sql
index 5cbc049..7bb1344 100644
--- a/src/trace_processor/perfetto_sql/stdlib/experimental/thread_executing_span.sql
+++ b/src/trace_processor/perfetto_sql/stdlib/experimental/thread_executing_span.sql
@@ -490,7 +490,7 @@
 -- @arg thread_state_id INT   Id of the thread_state to get the thread_executing_span id for
 -- @ret INT                   thread_executing_span id
 CREATE PERFETTO FUNCTION
-EXPERIMENTAL_THREAD_EXECUTING_SPAN_ID_FROM_THREAD_STATE_ID(thread_state_id INT)
+experimental_thread_executing_span_id_from_thread_state_id(thread_state_id INT)
 RETURNS INT AS
 WITH t AS (
   SELECT
diff --git a/test/trace_processor/diff_tests/android/tests.py b/test/trace_processor/diff_tests/android/tests.py
index 19677c3..b035f18 100644
--- a/test/trace_processor/diff_tests/android/tests.py
+++ b/test/trace_processor/diff_tests/android/tests.py
@@ -601,15 +601,15 @@
       """))
 
   def test_f2fs_aggregate_write_stats(self):
-      return DiffTestBlueprint(
+    return DiffTestBlueprint(
         trace=DataPath('android_monitor_contention_trace.atr'),
-        query= """
+        query="""
         SELECT IMPORT('android.io');
         SELECT total_write_count, distinct_processes, total_bytes_written,
                distinct_device_count, distict_inode_count, distinct_thread_count
         FROM android_io_f2fs_aggregate_write_stats
         """,
-        out= Csv("""
+        out=Csv("""
         "total_write_count","distinct_processes","total_bytes_written","distinct_device_count","distict_inode_count","distinct_thread_count"
         203,3,375180,1,13,6
         """))
diff --git a/tools/check_sql_modules.py b/tools/check_sql_modules.py
index 4fb349e..05b77e7 100755
--- a/tools/check_sql_modules.py
+++ b/tools/check_sql_modules.py
@@ -17,22 +17,26 @@
 # 'internal_' is documented with proper schema.
 
 import argparse
+from typing import List, Tuple
 import os
 import sys
+import re
 
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.append(os.path.join(ROOT_DIR))
 
-from python.generators.stdlib_docs.parse import parse_file_to_dict
+from python.generators.stdlib_docs.parse import ParsedFile, parse_file
 
 
 def main():
   parser = argparse.ArgumentParser()
   parser.add_argument(
       '--stdlib-sources',
-      default=os.path.join(ROOT_DIR, "src", "trace_processor", "stdlib"))
+      default=os.path.join(ROOT_DIR, "src", "trace_processor", "perfetto_sql",
+                           "stdlib"))
   args = parser.parse_args()
   errors = []
+  modules: List[Tuple[str, str, ParsedFile]] = []
   for root, _, files in os.walk(args.stdlib_sources, topdown=True):
     for f in files:
       path = os.path.join(root, f)
@@ -41,29 +45,30 @@
       with open(path, 'r') as f:
         sql = f.read()
 
-      res = parse_file_to_dict(path, sql)
-      errors += res if isinstance(res, list) else []
+      parsed = parse_file(path, sql)
+      modules.append((path, sql, parsed))
+
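+  # Run the textual lint checks (LIKE / CREATE_FUNCTION bans) over each of the
+  # collected files.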
+  for path, sql, parsed in modules:
+    errors += parsed.errors
+
+    lines = [l.strip() for l in sql.split('\n')]
+    for line in lines:
+      # Strip the SQL comments.
+      line = re.sub(r'--.*$', '', line)
 
       # Ban the use of LIKE in non-comment lines.
-      lines = [l.strip() for l in sql.split('\n')]
-      for line in lines:
-        if line.startswith('--'):
-          continue
-
-        if 'like' in line.casefold():
-          errors.append('LIKE is banned in trace processor metrics. '
-                        'Prefer GLOB instead.')
-          errors.append('Offending file: %s' % path)
+      if 'like' in line.casefold():
+        errors.append('LIKE is banned in trace processor metrics. '
+                      'Prefer GLOB instead.')
+        errors.append('Offending file: %s' % path)
 
       # Ban the use of CREATE_FUNCTION.
-      for line in lines:
-        if line.startswith('--'):
-          continue
-
-        if 'create_function' in line.casefold():
-          errors.append('CREATE_FUNCTION is deprecated in trace processor. '
-                        'Prefer CREATE PERFETTO FUNCTION instead.')
-          errors.append('Offending file: %s' % path)
+      if 'create_function' in line.casefold():
+        errors.append('CREATE_FUNCTION is deprecated in trace processor. '
+                      'Prefer CREATE PERFETTO FUNCTION instead.')
+        errors.append('Offending file: %s' % path)
 
   sys.stderr.write("\n".join(errors))
   sys.stderr.write("\n")
diff --git a/tools/gen_stdlib_docs_json.py b/tools/gen_stdlib_docs_json.py
index dcb4023..ee99675 100755
--- a/tools/gen_stdlib_docs_json.py
+++ b/tools/gen_stdlib_docs_json.py
@@ -23,7 +23,7 @@
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.append(os.path.join(ROOT_DIR))
 
-from python.generators.stdlib_docs.parse import parse_file_to_dict
+from python.generators.stdlib_docs.parse import parse_file
 
 
 def main():
@@ -70,16 +70,35 @@
     module_name = path.split("/")[0]
     import_key = path.split(".sql")[0].replace("/", ".")
 
-    docs = parse_file_to_dict(path, sql)
-    if isinstance(docs, list):
-      for d in docs:
-        print(d)
+    docs = parse_file(path, sql)
+    if docs.errors:
+      for e in docs.errors:
+        print(e)
       return 1
 
-    assert isinstance(docs, dict)
-    if not any(docs.values()):
-      continue
-    file_dict = {'import_key': import_key, **docs}
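+    # Flatten the parsed classes back into the dict layout previously returned
+    # by parse_file_to_dict: 'imports', 'functions' and 'view_functions'.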
+    file_dict = {
+        'import_key':
+            import_key,
+        'imports': [{
+            'name': table.name,
+            'desc': table.desc,
+            'type': table.type,
+            'cols': table.cols,
+        } for table in docs.table_views],
+        'functions': [{
+            'name': function.name,
+            'desc': function.desc,
+            'args': function.args,
+            'return_type': function.return_type,
+            'return_desc': function.return_desc,
+        } for function in docs.functions],
+        'view_functions': [{
+            'name': function.name,
+            'desc': function.desc,
+            'args': function.args,
+            'cols': function.cols,
+        } for function in docs.table_functions],
+    }
     modules[module_name].append(file_dict)
 
   with open(args.json_out, 'w+') as f: