Commit 0cb2cf1
Merge branch 'develop' into add_shell_tool_clean
hiker committed Mar 4, 2025
2 parents: 89f1ba1 + 0829610
Showing 29 changed files with 471 additions and 315 deletions.
pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -7,7 +7,7 @@ authors = [
 license = {file = 'LICENSE.txt'}
 dynamic = ['version', 'readme']
 requires-python = '>=3.7, <4'
-dependencies = ['fparser']
+dependencies = ['fparser >= 0.2']
 classifiers = [
     'Development Status :: 1 - Planning',
     'Environment :: Console',
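
The only dependency change is the new minimum version pin on fparser. As an illustrative aside (not part of the commit), a script that relies on newer fparser behaviour could guard against older installs. This sketch assumes fparser exposes __version__ and that the third-party packaging library is installed:

    import fparser
    from packaging.version import Version

    # Fail fast if the installed fparser predates the pinned minimum.
    assert Version(fparser.__version__) >= Version('0.2'), \
        f'fparser >= 0.2 required, found {fparser.__version__}'
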
run_configs/lfric/atm.py (6 changes: 1 addition & 5 deletions)
@@ -20,8 +20,7 @@
 from fab.tools import ToolBox
 
 from grab_lfric import lfric_source_config, gpl_utils_source_config
-from lfric_common import (API, configurator, fparser_workaround_stop_concatenation,
-                          get_transformation_script)
+from lfric_common import (API, configurator, get_transformation_script)
 
 logger = logging.getLogger('fab')
 
@@ -250,9 +249,6 @@ def file_filtering(config):
            api=API,
        )
 
-        # todo: do we need this one in here?
-        fparser_workaround_stop_concatenation(state)
-
        analyse(
            state,
            root_symbol='lfric_atm',
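
The net effect here (and in gungho.py and mesh_tools.py below) is that the workaround step between the PSyclone stage and the analysis stage simply drops out. A condensed, illustrative view of the resulting call sequence, with most arguments elided:

    # Before: psyclone(...); fparser_workaround_stop_concatenation(state); analyse(...)
    # After:
    psyclone(state, api=API)                 # other arguments elided
    analyse(state, root_symbol='lfric_atm')
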
run_configs/lfric/gungho.py (5 changes: 1 addition & 4 deletions)
@@ -22,8 +22,7 @@
 from fab.tools import ToolBox
 
 from grab_lfric import lfric_source_config, gpl_utils_source_config
-from lfric_common import (API, configurator, fparser_workaround_stop_concatenation,
-                          get_transformation_script)
+from lfric_common import (API, configurator, get_transformation_script)
 
 logger = logging.getLogger('fab')
 
@@ -75,8 +74,6 @@
            api=API,
        )
 
-        fparser_workaround_stop_concatenation(state)
-
        analyse(
            state,
            root_symbol='gungho_model',
run_configs/lfric/lfric_common.py (32 changes: 0 additions & 32 deletions)
@@ -1,10 +1,8 @@
 import logging
 import os
-import shutil
 from typing import Optional
 from pathlib import Path
 
-from fab.artefacts import ArtefactSet
 from fab.build_config import BuildConfig
 from fab.steps import step
 from fab.steps.find_source_files import find_source_files
@@ -84,36 +82,6 @@ def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_c
     find_source_files(config, source_root=config_dir)
 
 
-# ============================================================================
-@step
-def fparser_workaround_stop_concatenation(config):
-    """
-    fparser can't handle string concat in a stop statement. This step is
-    a workaround.
-
-    https://github.com/stfc/fparser/issues/330
-
-    """
-    feign_path = None
-    for file_path in config.artefact_store[ArtefactSet.FORTRAN_BUILD_FILES]:
-        if file_path.name == 'feign_config_mod.f90':
-            feign_path = file_path
-            break
-    else:
-        raise RuntimeError("Could not find 'feign_config_mod.f90'.")
-
-    # rename "broken" version
-    broken_version = feign_path.with_suffix('.broken')
-    shutil.move(feign_path, broken_version)
-
-    # make fixed version
-    bad = "_config: '// &\n 'Unable to close temporary file'"
-    good = "_config: Unable to close temporary file'"
-
-    open(feign_path, 'wt').write(
-        open(broken_version, 'rt').read().replace(bad, good))
-
-
 # ============================================================================
 def get_transformation_script(fpath: Path,
                               config: BuildConfig) -> Optional[Path]:
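
The deleted step only ever patched one file, feign_config_mod.f90: it renamed the original and wrote a copy with the concatenated stop-message string collapsed into a single literal that old fparser releases could parse. A minimal standalone sketch of the same patch, reusing the bad/good literals from the deleted code; the helper name and the idea of passing the path in directly are hypothetical:

    import shutil
    from pathlib import Path

    def patch_stop_concatenation(feign_path: Path) -> None:
        # Keep the unpatched file around, as the deleted step did.
        broken_version = feign_path.with_suffix('.broken')
        shutil.move(feign_path, broken_version)
        # Collapse the concatenated stop message into one literal.
        bad = "_config: '// &\n 'Unable to close temporary file'"
        good = "_config: Unable to close temporary file'"
        feign_path.write_text(broken_version.read_text().replace(bad, good))
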
run_configs/lfric/mesh_tools.py (4 changes: 1 addition & 3 deletions)
@@ -13,7 +13,7 @@
 from fab.steps.psyclone import psyclone, preprocess_x90
 from fab.tools import ToolBox
 
-from lfric_common import API, configurator, fparser_workaround_stop_concatenation
+from lfric_common import API, configurator
 from grab_lfric import lfric_source_config, gpl_utils_source_config
 
 
@@ -60,8 +60,6 @@
            api=API,
        )
 
-        fparser_workaround_stop_concatenation(state)
-
        analyse(
            state,
            root_symbol=['cubedsphere_mesh_generator', 'planar_mesh_generator', 'summarise_ugrid'],
source/fab/parse/c.py (42 changes: 24 additions & 18 deletions)
@@ -11,44 +11,48 @@
 from pathlib import Path
 from typing import List, Optional, Union, Tuple
 
-from fab.dep_tree import AnalysedDependent
-
 try:
     import clang  # type: ignore
     import clang.cindex  # type: ignore
 except ImportError:
     clang = None
 
+from fab.build_config import BuildConfig
+from fab.dep_tree import AnalysedDependent
 from fab.util import log_or_dot, file_checksum
 
 logger = logging.getLogger(__name__)
 

 class AnalysedC(AnalysedDependent):
     """
-    An analysis result for a single C file, containing symbol definitions and dependencies.
+    An analysis result for a single C file, containing symbol definitions and
+    dependencies.
 
-    Note: We don't need to worry about compile order with pure C projects; we can compile all in one go.
-    However, with a *Fortran -> C -> Fortran* dependency chain, we do need to ensure that one Fortran file
-    is compiled before another, so this class must be part of the dependency tree analysis.
+    Note: We don't need to worry about compile order with pure C projects; we
+          can compile all in one go. However, with a *Fortran -> C -> Fortran*
+          dependency chain, we do need to ensure that one Fortran file is
+          compiled before another, so this class must be part of the
+          dependency tree analysis.
 
     """
-    # Note: This subclass adds nothing to its parent, which provides everything it needs.
-    # We'd normally remove an irrelevant class like this but we want to keep the door open
-    # for filtering analysis results by type, rather than suffix.
-    pass
+    # Note: This subclass adds nothing to its parent, which provides
+    #       everything it needs. We'd normally remove an irrelevant class
+    #       like this but we want to keep the door open for filtering
+    #       analysis results by type, rather than suffix.

-class CAnalyser(object):
+class CAnalyser:
     """
     Identify symbol definitions and dependencies in a C file.
 
     """
 
-    def __init__(self):
+    def __init__(self, config: BuildConfig):
 
         # runtime
-        self._config = None
+        self._config = config
         self._include_region: List[Tuple[int, str]] = []
 
     # todo: simplify by passing in the file path instead of the analysed tokens?
     def _locate_include_regions(self, trans_unit) -> None:
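
With this change a CAnalyser can no longer be created without a build configuration, which removes the half-initialised state that the old self._config = None placeholder implied. A minimal usage sketch under the new signature; config is assumed to be an existing fab BuildConfig and the file name is illustrative:

    from pathlib import Path
    from fab.parse.c import CAnalyser

    analyser = CAnalyser(config=config)  # config: an existing BuildConfig
    analysis, artefact = analyser.run(fpath=Path('util.c'))
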
@@ -100,8 +104,7 @@ def _check_for_include(self, lineno) -> Optional[str]:
                 include_stack.pop()
         if include_stack:
             return include_stack[-1]
-        else:
-            return None
+        return None
 
     def run(self, fpath: Path) \
             -> Union[Tuple[AnalysedC, Path], Tuple[Exception, None]]:
@@ -149,9 +152,11 @@ def run(self, fpath: Path) \
                    continue
                logger.debug('Considering node: %s', node.spelling)
 
-                if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL, clang.cindex.CursorKind.VAR_DECL}:
+                if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL,
+                                 clang.cindex.CursorKind.VAR_DECL}:
                    self._process_symbol_declaration(analysed_file, node, usr_symbols)
-                elif node.kind in {clang.cindex.CursorKind.CALL_EXPR, clang.cindex.CursorKind.DECL_REF_EXPR}:
+                elif node.kind in {clang.cindex.CursorKind.CALL_EXPR,
+                                   clang.cindex.CursorKind.DECL_REF_EXPR}:
                    self._process_symbol_dependency(analysed_file, node, usr_symbols)
        except Exception as err:
            logger.exception(f'error walking parsed nodes {fpath}')
@@ -166,7 +171,8 @@ def _process_symbol_declaration(self, analysed_file, node, usr_symbols):
         if node.is_definition():
             # only global symbols can be used by other files, not static symbols
             if node.linkage == clang.cindex.LinkageKind.EXTERNAL:
-                # This should catch function definitions which are exposed to the rest of the application
+                # This should catch function definitions which are exposed to
+                # the rest of the application
                 logger.debug('  * Is defined in this file')
                 # todo: ignore if inside user pragmas?
                 analysed_file.add_symbol_def(node.spelling)
source/fab/parse/fortran.py (41 changes: 10 additions & 31 deletions)
@@ -11,7 +11,6 @@
 from pathlib import Path
 from typing import Union, Optional, Iterable, Dict, Any, Set
 
-from fparser.common.readfortran import FortranStringReader  # type: ignore
 from fparser.two.Fortran2003 import (  # type: ignore
     Entity_Decl_List, Use_Stmt, Module_Stmt, Program_Stmt, Subroutine_Stmt, Function_Stmt, Language_Binding_Spec,
     Char_Literal_Constant, Interface_Block, Name, Comment, Module, Call_Stmt, Derived_Type_Def, Derived_Type_Stmt,
@@ -21,6 +20,7 @@
 from fparser.two.Fortran2008 import (  # type: ignore
     Type_Declaration_Stmt, Attr_Spec_List)
 
+from fab.build_config import BuildConfig
 from fab.dep_tree import AnalysedDependent
 from fab.parse.fortran_common import iter_content, _has_ancestor_type, _typed_child, FortranAnalyserBase
 from fab.util import file_checksum, string_checksum
@@ -167,15 +167,21 @@ class FortranAnalyser(FortranAnalyserBase):
     A build step which analyses a fortran file using fparser2, creating an :class:`~fab.dep_tree.AnalysedFortran`.
 
     """
-    def __init__(self, std=None, ignore_mod_deps: Optional[Iterable[str]] = None):
+    def __init__(self,
+                 config: BuildConfig,
+                 std: Optional[str] = None,
+                 ignore_mod_deps: Optional[Iterable[str]] = None):
         """
+        :param config: The BuildConfig to use.
         :param std:
             The Fortran standard.
         :param ignore_mod_deps:
             Module names to ignore in use statements.
 
         """
-        super().__init__(result_class=AnalysedFortran, std=std)
+        super().__init__(config=config,
+                         result_class=AnalysedFortran,
+                         std=std)
         self.ignore_mod_deps: Iterable[str] = list(ignore_mod_deps or [])
         self.depends_on_comment_found = False
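
As with CAnalyser above, the Fortran analyser now receives its configuration up front. An illustrative construction under the new signature; config is again assumed to be an existing BuildConfig, and the std and ignore_mod_deps values are examples only:

    from fab.parse.fortran import FortranAnalyser

    analyser = FortranAnalyser(config=config,            # assumed BuildConfig
                               std='f2008',              # example standard
                               ignore_mod_deps=['mpi'])  # example ignore list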

@@ -295,33 +301,6 @@ def _process_comment(self, analysed_file, obj):
                 # without .o means a fortran symbol
                 else:
                     analysed_file.add_symbol_dep(dep)
-        if comment[:2] == "!$":
-            # Check if it is a use statement with an OpenMP sentinel:
-            # Use fparser's string reader to discard potential comment
-            # TODO #327: once fparser supports reading the sentinels,
-            # this can be removed.
-            # fparser issue: https://github.com/stfc/fparser/issues/443
-            reader = FortranStringReader(comment[2:])
-            try:
-                line = reader.next()
-            except StopIteration:
-                # No other item, ignore
-                return
-            try:
-                # match returns a 5-tuple, the third one being the module name
-                module_name = Use_Stmt.match(line.strline)[2]
-                module_name = module_name.string
-            except Exception:
-                # Not a use statement in a sentinel, ignore:
-                return
-
-            # Register the module name
-            if module_name in self.ignore_mod_deps:
-                logger.debug(f"ignoring use of {module_name}")
-                return
-            if module_name.lower() not in self._intrinsic_modules:
-                # found a dependency on fortran
-                analysed_file.add_module_dep(module_name)
 
     def _process_subroutine_or_function(self, analysed_file, fpath, obj):
         # binding?
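
For reference, the deleted branch recognised OpenMP sentinel comments such as '!$ use omp_lib'. A standalone sketch of the same matching logic, using only the fparser calls that appeared in the removed code (omp_lib is just an example module name):

    from fparser.common.readfortran import FortranStringReader
    from fparser.two.Fortran2003 import Use_Stmt

    comment = '!$ use omp_lib'
    reader = FortranStringReader(comment[2:])  # discard the '!$' sentinel
    line = reader.next()                       # first item, as in the removed code
    # match returns a 5-tuple; the third entry is the module name
    module_name = Use_Stmt.match(line.strline)[2].string
    print(module_name)  # -> omp_lib
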
@@ -353,7 +332,7 @@ def _process_subroutine_or_function(self, analysed_file, fpath, obj):
                     analysed_file.add_symbol_def(name.string)
 
 
-class FortranParserWorkaround(object):
+class FortranParserWorkaround():
     """
     Use this class to create a workaround when the third-party Fortran parser is unable to process a valid source file.
Diffs for the remaining 22 of the 29 changed files are not shown.
