diff --git a/joss/paper.bib b/joss/paper.bib index d861cf2e..248630b2 100644 --- a/joss/paper.bib +++ b/joss/paper.bib @@ -28,24 +28,6 @@ @SPLASH bibsource = {dblp computer science bibliography, http://dblp.org} } -@article{DBLP:journals/jocs/OrchardRO15, - author = {Orchard, Dominic and - Rice, Andrew and - Oshmyan, Oleg}, - title = {Evolving Fortran types with inferred units-of-measure}, - journal = {J. Comput. Science}, - volume = {9}, - pages = {156--162}, - year = {2015}, - url = {http://dx.doi.org/10.1016/j.jocs.2015.04.018}, - doi = {10.1016/j.jocs.2015.04.018}, - timestamp = {Mon, 13 Jul 2015 11:12:24 +0200}, - biburl = {http://dblp.dagstuhl.de/rec/bib/journals/jocs/OrchardRO15}, - bibsource = {dblp computer science bibliography, http://dblp.org} -} - - - @article{DBLP:journals/cse/ContrastinRDO16, author = {Mistral Contrastin and Andrew C. Rice and @@ -115,14 +97,21 @@ @article{DBLP:journals/corr/abs-2011-06094 } @article{orchard2017verifying, - title={Verifying spatial properties of array computations}, - author={Orchard, Dominic and Contrastin, Mistral and Danish, Matthew and Rice, Andrew}, - journal={Proceedings of the ACM on Programming Languages}, - volume={1}, - number={OOPSLA}, - pages={75}, - year={2017}, - publisher={ACM} + author = {Dominic A. Orchard and + Mistral Contrastin and + Matthew Danish and + Andrew C. Rice}, + title = {Verifying spatial properties of array computations}, + journal = {Proc. {ACM} Program. Lang.}, + volume = {1}, + number = {{OOPSLA}}, + pages = {75:1--75:30}, + year = {2017}, + url = {https://doi.org/10.1145/3133899}, + doi = {10.1145/3133899}, + timestamp = {Sat, 30 Sep 2023 10:23:24 +0200}, + biburl = {https://dblp.org/rec/journals/pacmpl/OrchardCDR17.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} } @inproceedings{contrastin2016lightning, @@ -170,27 +159,39 @@ @proceedings{DBLP:conf/iccS/2014 @article{backus1978history, - title={The history of {F}ortran {I}, {II}, and {III}}, - author={Backus, John}, - journal={ACM Sigplan Notices}, - volume={13}, - number={8}, - pages={165--180}, - year={1978}, - publisher={ACM New York, NY, USA} +author = {Backus, John}, +title = {The history of {FORTRAN I, II, and III}}, +year = {1978}, +issue_date = {August 1978}, +publisher = {Association for Computing Machinery}, +address = {New York, NY, USA}, +volume = {13}, +number = {8}, +issn = {0362-1340}, +url = {https://doi.org/10.1145/960118.808380}, +doi = {10.1145/960118.808380}, +abstract = {Before 1954 almost all programming was done in machine language or assembly language. Programmers rightly regarded their work as a complex, creative art that required human inventiveness to produce an efficient program. Much of their effort was devoted to overcoming the difficulties created by the computers of that era: the lack of index registers, the lack of built- in floating point operations, restricted instruction sets (which might have AND but not OR, for example), and primitive input- output arrangements. Given the nature of computers, the services which “automatic programming” performed for the programmer were concerned with overcoming the machine's shortcomings. Thus the primary concern of some “automatic programming” systems was to allow the use of symbolic addresses and decimal numbers (e.g., the MIDAC Input Translation Program [Brown and Carr 1954]).But most of the larger “automatic. 
Programming” systems (with the exception of Laning and Zierler's algebraic system [Laning and Zierler 1954] and the A-2 compiler [Remington Rand 1953; Moser 1954]) simply provided a synthetic “computer” with an order code different from that of the real machine. This synthetic computer usually had floating point instructions and index registers and had improved input-output commands; it was therefore much easier to program than its real counterpart.}, +journal = {SIGPLAN Not.}, +month = aug, +pages = {165–180}, +numpages = {16} } + @article{vanderbauwhede2022making, - title={Making legacy Fortran code type safe through automated program transformation}, - author={Vanderbauwhede, Wim}, - journal={The Journal of Supercomputing}, - volume={78}, - number={2}, - pages={2988--3028}, - year={2022}, - publisher={Springer} + author = {Wim Vanderbauwhede}, + title = {Making legacy {F}ortran code type safe through automated program transformation}, + journal = {The Journal of Supercomputing}, + volume = {78}, + number = {2}, + pages = {2988--3028}, + year = {2022}, + url = {https://doi.org/10.1007/s11227-021-03839-9}, + doi = {10.1007/S11227-021-03839-9}, + timestamp = {Tue, 08 Feb 2022 10:40:59 +0100}, + biburl = {https://dblp.org/rec/journals/tjs/Vanderbauwhede22.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} } - @inproceedings{urmaetal2014, author = {Urma, Raoul-Gabriel and Orchard, Dominic and Mycroft, Alan}, title = {Programming Language Evolution Workshop Report}, @@ -223,7 +224,8 @@ @article{walters2017met number={4}, pages={1487--1520}, year={2017}, - publisher={Copernicus GmbH} + publisher={Copernicus GmbH}, + doi={10.5194/gmd-10-1487-2017} } @misc{plusFORT, @@ -290,19 +292,29 @@ @article{vanderbauwhede2022making } @misc{danish2024incremental, - title={Incremental units-of-measure verification}, - author={Matthew Danish and Dominic Orchard and Andrew Rice}, - year={2024}, - eprint={2406.02174}, - archivePrefix={arXiv}, - primaryClass={cs.PL} + author = {Matthew Danish and + Dominic Orchard and + Andrew Rice}, + title = {Incremental units-of-measure verification}, + journal = {CoRR}, + volume = {abs/2406.02174}, + year = {2024}, + url = {https://doi.org/10.48550/arXiv.2406.02174}, + doi = {10.48550/ARXIV.2406.02174}, + eprinttype = {arXiv}, + eprint = {2406.02174}, + timestamp = {Fri, 05 Jul 2024 16:54:14 +0200}, + biburl = {https://dblp.org/rec/journals/corr/abs-2406-02174.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} } @inproceedings{mendez2014climate, - title={Climate models: challenges for Fortran development tools}, - author={M{\'e}ndez, Mariano and Tinetti, Fernando G and Overbey, Jeffrey L}, + author={Méndez, Mariano and Tinetti, Fernando G. 
and Overbey, Jeffrey L.}, booktitle={2014 Second International Workshop on Software Engineering for High Performance Computing in Computational Science and Engineering}, - pages={6--12}, + title={{Climate Models: Challenges for Fortran Development Tools}}, year={2014}, - organization={IEEE} -} \ No newline at end of file + volume={}, + number={}, + pages={6-12}, + keywords={Meteorology;Analytical models;Predictive models;Computational modeling;Complexity theory;Standards;Software}, + doi={10.1109/SE-HPCCSE.2014.7}} diff --git a/joss/paper.md b/joss/paper.md index 9672f9ba..f8f4568b 100644 --- a/joss/paper.md +++ b/joss/paper.md @@ -51,25 +51,24 @@ bibliography: paper.bib # Summary fortran-src is an open source Haskell library and command-line application for the lexing, parsing, -and static analysis of Fortran source code. It provides an essential -front-end interface to build other Fortran language tools, e.g., tools for +and static analysis of Fortran source code. It provides an + interface to build other Fortran language tools, e.g., for static analysis, automated refactoring, verification, and compilation. -The tool provides multiple parsers which support Fortran source code conforming to the -FORTRAN 66, FORTRAN 77, Fortran 90 and Fortran 95 standards, as well -as some legacy extensions and partial Fortran 2003 support. The -parsers generate a shared Abstract +The library provides multiple parsers which support Fortran code conforming to the +FORTRAN 66, FORTRAN 77, Fortran 90, Fortran 95 standards, +some legacy extensions, and partial Fortran 2003 support. +The parsers generate a shared Abstract Syntax Tree representation (AST), over which a variety -of core static analyses are defined to facilitate the development of +of core static analyses are defined to facilitate development of analysis and language tools. -The library has been deployed in a number of -projects in both academia and industry to help build further language tools -for manipulating Fortran. +The library has been deployed in several +language tool projects in academia and industry. # Statement of need As one of the oldest surviving programming languages [@backus1978history], Fortran -is used in a vast amount of software still in deployment. Fortran is not only a mainstay -of legacy software, but is also still used to write new software, +underpins a vast amount of software still in deployment. Fortran is not only a mainstay +of legacy software, but is also used to write new software, particularly in the sciences. Given the importance of numerical models in science, verifying the correctness of such models is critical for scientific integrity and progress. However, doing so is @@ -77,7 +76,7 @@ difficult, even more so than for traditional software; for computational models, the expected program behaviour is often unknown, uncertainty is the rule, and approximations are pervasive. Despite decades of progress in program verification within computer science, -few formal verification techniques are currently applied in scientific software. To +few formal verification techniques are applied in scientific software. To facilitate a step-change in the effectiveness of verification for computational science, a subset of the authors of this paper developed a suite of verification and static @@ -93,31 +92,30 @@ decisions [@mendez2014climate]. In 2024, Fortran re-entered the Top 10 programmi the [TIOBE Index](https://www.tiobe.com/tiobe-index/), showing its enduring popularity. 
-The continued use of Fortran, particualarly in -scientific contexts, was the catalyst for the fortran-src software package. +The continued use of Fortran, particularly in +scientific contexts, was the catalyst for this software package. -One of the challenges in writing language tools for Fortran is its long +A challenge in writing language tools for Fortran is its long history. There have been several major language standards (FORTRAN -I-IV, FORTRAN 66 and 77, Fortran 90, 95, 2003, 2008, 2018 and more) or +I-IV, FORTRAN 66 and 77, Fortran 90, 95, 2003, 2008, etc.) or _restandardisations_. Newer standards often deprecate features -which were known to be a ready source of error, or were difficult to +which were known to be a ready source of errors, or were difficult to specify or understand. However, compilers often support an amalgam of features across language standards, including deprecated features (@urmaetal2014). This enables developers to keep using deprecated features, or mix -a variety of language standard styles. +a variety of language standards. This complicates the task of developing new tools for manipulating Fortran source code; one must tame the weight of decades of language evolution. This package, fortran-src, provides an open-source unified core for -statically analysing Fortran code across a variety of standards, with -a focus on legacy code over cutting-edge modern Fortran. It includes +statically analysing Fortran code across language standards, with +a focus on legacy code over cutting-edge modern Fortran. It is both +a standalone tool and a library, providing a suite of standard static analyses and tools to be used as a basis for -further programming language tools and systems. It provides the core -front-end and has been released as a standalone library and tool. +further programming language tools and systems. ## Related software -A variety of other tools exist for analysing Fortran, but those we -have found are all commerical and closed source, e.g., +A variety of other tools exist for analysing Fortran, but most are commercial and closed source, e.g., plusFORT\footnote{\url{https://polyhedron.com/?product=plusfort}} (which includes the SPAG refactoring tool), the SimCon fpt tool\footnote{\url{http://simconglobal.com/fpt_summary.html}} (which includes further verification features like dimensional analysis), and Forcheck\footnote{\url{https://codework.com/solutions/developer-tools/forcheck-fortran-analysis/}}. @@ -131,9 +129,9 @@ tools for refactoring Fortran [@vanderbauwhede2022making]: open source tool for upgrading FORTRAN 77 code to Fortran 95. # Functionality -fortran-src provides the following functions over Fortran source code: +fortran-src provides the following functionality: - * lexing and parsing to an expressive abstract syntax tree; + * lexing and parsing Fortran to an expressive abstract syntax tree; * perform various static analyses; * pretty printing; * "reprinting", or patching sections of source code without removing secondary @@ -145,8 +143,7 @@ tool for running and inspecting analyses. By exporting parsed code to JSON, the parsing and standard analyses that fortran-src provides may be utilized by non-Haskell tools. -The library's top-level module is called `Language.Fortran`. As such -all submodules are within that namespace. +The library's top-level module is `Language.Fortran`; all submodules are within that namespace.
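+
+As a minimal sketch of library use (types and error handling simplified, and
+based on the `byVer` entry point of the unified `Language.Fortran.Parser`
+interface), a small free-form program can be parsed and its AST printed:
+
+```haskell
+import qualified Language.Fortran.Parser  as F.Parser
+import qualified Language.Fortran.Version as F
+import qualified Data.ByteString.Char8    as B
+
+-- Minimal sketch: parse a small Fortran 90 program and print its AST.
+main :: IO ()
+main =
+  case F.Parser.byVer F.Fortran90 "" program of
+    Left err  -> putStrLn $ "parse error: " <> show err
+    Right ast -> print ast
+  where
+    program = B.pack "program main\n  print *, 'hello'\nend program\n"
+```
+
+Here, `byVer` selects the underlying parser for the requested language
+version; the individual lexers and parsers are described below.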
## Lexing and parsing @@ -173,8 +170,7 @@ Furthermore, the Fortran language has evolved through two broad syntactic forms: Therefore, two lexers are provided: the fixed form lexer, for handling earlier versions of the language: FORTRAN 66 and FORTRAN 77 (and additional `Legacy` and `Extended` modes), and the free form lexer, for Fortran -90 onwards. The lexers are auto-generated via the -[`alex`](https://github.com/haskell/alex) tool. +90 onwards. The fixed form lexer (`Language.Fortran.Parser.Fixed.Lexer`) handles the expectation that the first 6 columns of a line are reserved for @@ -198,6 +194,7 @@ within the `Language.Fortran.Parser.Fixed` namespace and the rest are within `Language.Fortran.Parser.Free`. A top-level module (`Language.Fortran.Parser`) provides a unified point of access to the underlying parsers. +The lexers are auto-generated via the [`alex`](https://github.com/haskell/alex) tool. The suite of parsers is automatically generated from attribute grammar definitions in the Bison format, via the [`happy`](https://github.com/haskell/happy) tool. @@ -217,16 +214,6 @@ for example, collecting information about types within the nodes of the tree, or flagging whether the particular node of the tree has been rewritten or refactored. -An interface of functions provides the ability to extract and set annotations -via the `Annotated` class, of which all AST data types are an instance: - -```haskell -class Annotated f where - getAnnotation :: f a -> a - setAnnotation :: a -> f a -> f a - modifyAnnotation :: (a -> a) -> f a -> f a -``` - Some simple transformations are provided on ASTs: * Grouping transformation, turning unstructured ASTs into structured ASTs @@ -266,6 +253,10 @@ is provided for evaluation of expressions and for semantic analysis (`Language.Fortran.Repr.Eval.Value`) leverages this representation and enables some symbolic manipulation too, essentially providing some partial evaluation. +For a demonstration of using fortran-src for static analysis, there +is a small demo tool which detects if an allocatable array is used +before it has been allocated.\footnote{\url{https://github.com/camfort/allocate-analysis-example}} + ## Pretty printing, reprinting, and rewriting A commonly required feature of language tools is to generate source code. @@ -287,329 +278,6 @@ node are stitched into the position from which they originated in the input source file. This further enables the development of refactoring tools that need to perform transformations on source code text. -# Example usage - -## Example command-line tool use - -The bundled executable `fortran-src` exposes tools for working with -Fortran source code, including inspecting analysis results (such as the program -basic blocks or inferred variable and function types) and code reformatting. - -In the following examples, the file `main.f90` is a Fortran -90-compatible program with the following content: - -```fortran -program main - implicit none - - real :: r, area - r = 1.0 - area = area_of_circle(r) - print *, area - - contains - - function area_of_circle(r) result(area) - real, parameter :: pi = 3.14 - real, intent(in) :: r - real :: area - area = r * r * pi - end function -end program -``` - -The `fortran-src` binary must be on your system path in order to -invoke it. Alternatively, if you use Stack to build the project, you -may replace the `fortran-src ` prefix with `stack run -- ` and -invoke it directly in the project directory. - -Invocations follow the common syntax `fortran-src `. 
You select -the command you wish to run using the relevant option. Run `fortran-src --help` -to view a built-in description of the options available. - -The extension of the input file determines which Fortran version the file is -parsed as. This may be overriden by explicitly requesting a specific version: - -``` -fortran-src main.f90 --fortranVersion=90 -``` - -Running `fortran-src` with no arguments displays the included help, which includes -an enumeration of the Fortran versions supported. - -Parse a file and view the typechecker output: `fortran-src main.f90 ---typecheck` - -``` -4:12 r Real 4 Variable -4:15 area Real 4 Variable -11:4 area_of_circle - Function -12:27 pi Real 4 Parameter -13:28 r Real 4 Variable -14:16 area Real 4 Variable -``` - -A file can be parsed and then pretty-printed back using the -fortran-src printing algorithm: `fortran-src main.f90 --reprint` - -```fortran -program main - implicit none - real :: r, area - r = 1.0e0 - area = area_of_circle(r) - print *, area - - contains - - function area_of_circle(r) result(area) - real, parameter :: pi = 3.14e0 - real, intent(in) :: r - real :: area - area = ((r * r) * pi) - end function area_of_circle -end program main -``` - -Note the printing functionality has added the additional information of the program unit name -on the `end` lines here. - -Fortran code is printed with its corresponding _source form_ i.e. FORTRAN 77 -code is printed using fixed source form, while Fortran 90 and above use free -source form. - -## Example library use: parsing and printing - -The following simple Haskell example shows how to import the general -parser module, fix the language version to Fortran 90, -parse some code into the AST, and then print it to standard output: - -```haskell -module Tmp where - -import qualified Language.Fortran.Parser as F.Parser -import qualified Language.Fortran.Version as F - -import qualified Data.ByteString.Char8 as B - -main :: IO () -main = do - v <- askFortranVersion - let parse = F.Parser.byVer v - case parse "" program of - Left err -> putStrLn $ "parse error: " <> show err - Right ast -> print ast - -askFortranVersion :: IO F.FortranVersion -askFortranVersion = return F.Fortran90 - -program :: B.ByteString -program = B.pack $ unlines $ - [ "function area_of_circle(r) result(area)" - , " real, parameter :: pi = 3.14" - , " real, intent(in) :: r" - , " real :: area" - , " area = r * r * pi" - , "end function" - , "" - , "program main" - , " print *, area_of_circle(1.0)" - , "end program" - ] -``` - -A simple way of testing this example is to install the `fortran-src` -package via cabal (i.e., `cabal install fortran-src`) to make it -available within your environment for GHC. - -## Example library use: Analysis of balanced ALLOCATE statements - -Let's say we wish to write a new Fortran code analysis using fortran-src. -Fortran 90 introduced *allocatable arrays*, which enable declaring and using -dynamic arrays in a straightforward manner. Allocatable arrays are declared only -with the scalar type and rank, omitting the upper bound: - -```fortran -integer, dimension(:), allocatable :: xs -``` - -A newly-declared allocatable begins *unallocated*. Reading from an unallocated -array is an erroneous operation. You must first `allocate` the array with -dimensions: - -```fortran -! allocate memory for an array of 5 integers -! (note that the array is not initialized) -allocate(xs(5)) -``` - -When finished, you must manually `deallocate` the array. 
- -```fortran -deallocate(xs) -``` - -Arrays must be deallocated before they go out of scope, or else risk leaking -memory. As an example use of fortran-src, -we show here a simple code pass that asserts this property. Since arrays -may be deallocated and re-allocated during their lifetime, we shall track the -allocatables currently in scope, and assert that all are unallocated at the end -of the program unit. Fortran being highly procedural means it lends itself to -monadic program composition, so we first design a monad that supports tracking -allocatables. (We use the `effectful` effect library here, but the details are -insignificant). The full code listing is available online.\footnote{\url{https://github.com/camfort/allocate-analysis-example}} - -```haskell -import qualified Language.Fortran.AST as F --- .... --- Declare an effectful interface for the static analysis -data Analysis :: Effect where - DeclareVar :: F.Name -> Analysis m () - - MakeVarAllocatable :: F.Name -> Analysis m () - AllocVar :: F.Name -> Analysis m () - DeallocVar :: F.Name -> Analysis m () - - AskVar :: F.Name -> Analysis m (Maybe VarState) - - -- extra: enable emitting other semi-relevant analysis info - EmitErr :: String -> Analysis m a - EmitWarn :: String -> String -> Analysis m () - --- Representation of variable information for the analysis -data VarState - -- | Declared. - = VarIsFresh - - -- | Allocatable. Counts number of times allocated. - | VarIsAllocatable AllocState Int - deriving stock Show - -data AllocState - = Allocd - | Unallocd - deriving stock Show -``` - -Now we can design a mini program in this monad by pattern matching on the -Fortran AST data types from fortran-src: - -```haskell --- Analyse statements -analyseStmt :: Analysis :> es => F.Statement a -> Eff es () -analyseStmt = \case - -- Emit declarations - F.StDeclaration _ _ _ attribs decls -> - traverse_ (declare attribs) (F.aStrip decls) - - -- Emit allocatable names - F.StAllocatable _ _ decls -> - traverse_ makeAllocatable (F.aStrip decls) - - -- Emit allocated variables - F.StAllocate _ _ _ es _ -> - traverse_ allocate (F.aStrip es) - - -- Emit deallocated variables - F.StDeallocate _ _ es _ -> - traverse_ deallocate (F.aStrip es) - - -- Check usage in any other statements - st -> analyseStmtAccess st - --- Handle a declaration -declare - :: Analysis :> es - => Maybe (F.AList F.Attribute a) -> F.Declarator a -> Eff es () -declare mAttribs d = - case F.declaratorVariable d of - F.ExpValue _ _ (F.ValVariable dv) -> do - declareVar dv - case mAttribs of - Nothing -> pure () - Just attribs -> - if attribListIncludesAllocatable (F.aStrip attribs) - then makeVarAllocatable dv - else pure () - _ -> emitWarn "bad declarator form" "ignoring" - --- Handle an allocation expression -allocate :: Analysis :> es => F.Expression a -> Eff es () -allocate = \case - F.ExpSubscript _ _ (F.ExpValue _ _ (F.ValVariable v)) _dims -> - allocVar v - _ -> emitWarn "unsupported ALLOCATE form" "ignoring" - --- Handle a deallocation expression -deallocate :: Analysis :> es => F.Expression a -> Eff es () -deallocate = \case - F.ExpValue _ _ (F.ValVariable v) -> - deallocVar v - _ -> emitWarn "unsupported DEALLOCATE form" "ignoring" -``` - -We wish to evaluate this mini program to receive a report of the allocatable -variables and whether they were properly deallocated. A state monad holding a -map of variable names to `VarState` entries can implement this, and we bolt this on -top of `IO` for easy emission of warnings and errors. 
-The `runAnalysis` function then handles the effect interface, using -the stateful map to store information about our variables, i.e., whether -they are allocatable, allocated, deallocated, or neither: - -```haskell --- 'F.Name' is the type synonym for variable names -type Ctx = Map F.Name VarState - -runAnalysis - :: (IOE :> es, State Ctx :> es) - => Eff (Analysis : es) a - -> Eff es a --- e.g. @'AskVar' v@ gets mapped to @'Map.lookup' v ctx@ -``` - -For the sake of brevity, we include just the code for handling the deallocation operation. -To handle deallocations, we look up the deallocated -variable in the map and report on various behaviours that would be program errors -in the Fortran code: -(1) the deallocated variable does not exist; -(2) the deallocated variable is not allocatable; -(3) the deallocated variable is allocatable but has not been allocated. - -Lastly (4) is a non-buggy situation where the deallocated variable -is allocatable and is allocated, but is not marked as unallocated. - -```haskell - DeallocVar v -> do - st <- State.get - case Map.lookup v st of - -- (1) Variable was never declared - Nothing -> err "tried to deallocate undeclared var" - - -- ... variable is declared - Just vst -> - - case vst of - -- (2) Variable is not allocatable - VarIsFresh -> err "tried to deallocate unallocatable var" - - -- ... variable is allocatable - VarIsAllocatable vstAllocState vstAllocCount -> - - -- Check its state - case vstAllocState of - - -- (3) Trying to deallocate unallocated variable - Unallocd -> err "tried to deallocate unallocated var" - - -- (4) Deallocating allocated variable - Allocd -> do - let vst' = VarIsAllocatable Unallocd vstAllocCount - State.put $ Map.insert v vst' st -``` - -Note, this analysis does not handle control flow operators. Further work may involve -tracking allocatable status specially in data flow analyses. - # Work building on fortran-src ## CamFort @@ -657,9 +325,7 @@ loops defining those induction variables). CamFort has been previously deployed at the Met Office, with its analysing tooling run on the Unified Model (@walters2017met) to ensure internal code quality standards are met. -## Further analyses building on fortran-src - -### fortran-vars memory model library +## fortran-vars memory model library `fortran-vars` is a static analysis library built on top of `fortran-src`. Many static analysis questions depend on knowing the value and type of @@ -673,7 +339,7 @@ Fortran 77 are statically allocated by default. Data flow analysis, such as constant propagation analysis, can be conducted based on memory locations instead of variable names. -### Nonstandard INTEGER refactoring +## Nonstandard INTEGER refactoring Outside of CamFort, fortran-src has been used to build other (closed source) refactoring tools to help migration and improve the quality @@ -681,35 +347,9 @@ of large legacy codebases, building on top of the library's AST, analysis, and reprinting features. One example of this has been an effort to fix a number of issues regarding the -use of integers used where logical types are expected. 
There are four main -issues with this: - -* Use of integers with logical operators, which often behave as bitwise - functions in a way that propagate up in compound logical expressions; -* Assignment of integers to logicals, some compilers 'normalizing' the value; -* Use of integers in conditionals, the evaluation of which varies by compiler; -* Use of arithmetic operators with logicals, which can have unexpected results - when the underlying value is not a 0 or 1. - -Combined, this can lead to some very unexpected behaviour, such as the following -snippet: - -```fortran - integer is_foo - if (.not. is_foo()) then -c important code to *not* run when foo is true -c ... - endif -``` - -Even if `is_foo` returns the expected values of 0 and 1, the `.not.` performs as -a bitwise not, and then, if the compiler determines it to be true by being not -equal to 0, then this will *always* return true. - -In order to fix these cases, additional tooling was deployed to gather information -about whether a given function returned a 0 or 1. With this, a tool was written +use of integers used where logical types are expected. A tool was written to refactor many expressions by using the fortran-vars typechecker to find -integer expressions and normalise them using `.ne. 0` while flagging anything +integer expressions and normalise them while flagging anything potentially changing behaviour for further manual inspection. These might be situations in which some code is hard to statically analyse but safe, or it may have uncovered an existing bug. The tool uncovered many such bugs in a particular codebase during this effort, including several in the @@ -722,8 +362,6 @@ behaviour. Ongoing efforts are using fortran-src to remove the patches on top of GFortran, as well as to introduce interfaces for more robust type checking in this code base. ---- - # Project maintenance and documentation fortran-src may be built and used on Windows, Mac and Linux systems using a recent version of the