File suse-kabi-tools-0.2.0+git12.d8ab89d.obscpio of Package suse-kabi-tools

07070100000000000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002B00000000suse-kabi-tools-0.2.0+git12.d8ab89d/.cargo07070100000001000081A400000000000000000000000167CF095300000034000000000000000000000000000000000000003700000000suse-kabi-tools-0.2.0+git12.d8ab89d/.cargo/config.toml[build]
rustdocflags = ["--document-private-items"]
07070100000002000041ED00000000000000000000000367CF095300000000000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/.github07070100000003000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000003600000000suse-kabi-tools-0.2.0+git12.d8ab89d/.github/workflows07070100000004000081A400000000000000000000000167CF095300000410000000000000000000000000000000000000003D00000000suse-kabi-tools-0.2.0+git12.d8ab89d/.github/workflows/ci.ymlname: Continuous integration

on: push

env:
  CARGO_TERM_COLOR: always

jobs:
  build_and_test:
    name: Build and test
    runs-on: ubuntu-latest
    container: opensuse/tumbleweed
    strategy:
      fail-fast: false
      matrix:
        toolchain:
          - distribution
          - stable
          - beta
          - nightly
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4
      - name: Install the ${{ matrix.toolchain }} Rust toolchain
        shell: bash
        run: |
          if [ ${{ matrix.toolchain }} == distribution ]; then
            zypper --non-interactive install cargo rust
          else
            zypper --non-interactive install rustup
            rustup update ${{ matrix.toolchain }}
            rustup default ${{ matrix.toolchain }}
          fi
      - name: Build the project
        run: cargo build
      - name: Run tests
        run: cargo test
      - name: Check documentation
        env:
          RUSTDOCFLAGS: -D warnings
        run: cargo doc --no-deps
07070100000005000081A400000000000000000000000167CF095300000503000000000000000000000000000000000000004000000000suse-kabi-tools-0.2.0+git12.d8ab89d/.github/workflows/pages.ymlname: Deploy documentation to GitHub Pages

on:
  push:
    branches: ["main"]

  # Allow to run this workflow manually from the Actions tab.
  workflow_dispatch:

# Allow only one concurrent deployment, but don't cancel any in-progress runs.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  build:
    name: Build content
    runs-on: ubuntu-latest
    container: opensuse/tumbleweed
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4
      - name: Install build dependencies
        run: zypper --non-interactive install groff-full
      - name: Render man pages to HTML
        run: |
          mkdir pages
          groff -mandoc -Thtml doc/ksymtypes.1 > pages/ksymtypes.1.html
          groff -mandoc -Thtml doc/ksymtypes.5 > pages/ksymtypes.5.html
      - name: Upload the content as artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: pages/

  deploy:
    name: Deploy to GitHub Pages
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    permissions:
      pages: write
      id-token: write
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
07070100000006000081A400000000000000000000000167CF095300000008000000000000000000000000000000000000002F00000000suse-kabi-tools-0.2.0+git12.d8ab89d/.gitignore/target
07070100000007000081A400000000000000000000000167CF0953000046AC000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/COPYING                    GNU GENERAL PUBLIC LICENSE
                       Version 2, June 1991

 Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The licenses for most software are designed to take away your
freedom to share and change it.  By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users.  This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it.  (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.)  You can apply it to
your programs, too.

  When we speak of free software, we are referring to freedom, not
price.  Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

  To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

  For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have.  You must make sure that they, too, receive or can get the
source code.  And you must show them these terms so they know their
rights.

  We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

  Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software.  If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

  Finally, any free program is threatened constantly by software
patents.  We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary.  To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.

  The precise terms and conditions for copying, distribution and
modification follow.

                    GNU GENERAL PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License.  The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language.  (Hereinafter, translation is included without limitation in
the term "modification".)  Each licensee is addressed as "you".

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope.  The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.

  1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.

You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.

  2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

    a) You must cause the modified files to carry prominent notices
    stating that you changed the files and the date of any change.

    b) You must cause any work that you distribute or publish, that in
    whole or in part contains or is derived from the Program or any
    part thereof, to be licensed as a whole at no charge to all third
    parties under the terms of this License.

    c) If the modified program normally reads commands interactively
    when run, you must cause it, when started running for such
    interactive use in the most ordinary way, to print or display an
    announcement including an appropriate copyright notice and a
    notice that there is no warranty (or else, saying that you provide
    a warranty) and that users may redistribute the program under
    these conditions, and telling the user how to view a copy of this
    License.  (Exception: if the Program itself is interactive but
    does not normally print such an announcement, your work based on
    the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole.  If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works.  But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.

In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

  3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:

    a) Accompany it with the complete corresponding machine-readable
    source code, which must be distributed under the terms of Sections
    1 and 2 above on a medium customarily used for software interchange; or,

    b) Accompany it with a written offer, valid for at least three
    years, to give any third party, for a charge no more than your
    cost of physically performing source distribution, a complete
    machine-readable copy of the corresponding source code, to be
    distributed under the terms of Sections 1 and 2 above on a medium
    customarily used for software interchange; or,

    c) Accompany it with the information you received as to the offer
    to distribute corresponding source code.  (This alternative is
    allowed only for noncommercial distribution and only if you
    received the program in object code or executable form with such
    an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for
making modifications to it.  For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable.  However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.

If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.

  4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License.  Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.

  5. You are not required to accept this License, since you have not
signed it.  However, nothing else grants you permission to modify or
distribute the Program or its derivative works.  These actions are
prohibited by law if you do not accept this License.  Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.

  6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions.  You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

  7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License.  If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all.  For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices.  Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

  8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded.  In such case, this License incorporates
the limitation as if written in the body of this License.

  9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time.  Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number.  If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation.  If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.

  10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission.  For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this.  Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

                            NO WARRANTY

  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along
    with this program; if not, write to the Free Software Foundation, Inc.,
    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

    Gnomovision version 69, Copyright (C) year name of author
    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License.  Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary.  Here is a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
  `Gnomovision' (which makes passes at compilers) written by James Hacker.

  <signature of Ty Coon>, 1 April 1989
  Ty Coon, President of Vice

This General Public License does not permit incorporating your program into
proprietary programs.  If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library.  If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
07070100000008000081A400000000000000000000000167CF09530000009F000000000000000000000000000000000000002F00000000suse-kabi-tools-0.2.0+git12.d8ab89d/Cargo.lock# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "suse-kabi-tools"
version = "0.1.0"
07070100000009000081A400000000000000000000000167CF0953000000D6000000000000000000000000000000000000002F00000000suse-kabi-tools-0.2.0+git12.d8ab89d/Cargo.toml# Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
# SPDX-License-Identifier: GPL-2.0-or-later

[package]
name = "suse-kabi-tools"
version = "0.1.0"
authors = ["Petr Pavlu <petr.pavlu@suse.com>"]
edition = "2021"
0707010000000A000081A400000000000000000000000167CF0953000004B5000000000000000000000000000000000000002E00000000suse-kabi-tools-0.2.0+git12.d8ab89d/README.md# suse-kabi-tools

## Overview

suse-kabi-tools is a set of Application Binary Interface (ABI) tools for the Linux kernel.

The project currently contains the following tools:

* ksymtypes &ndash; a tool to work with symtypes files which are produced by [genksyms][genksyms]
  during the Linux kernel build. It allows to consolidate multiple symtypes files into a single file
  and to compare symtypes data. For details, see the manual pages [ksymtypes(1)][ksymtypes_1] and
  [ksymtypes(5)][ksymtypes_5].
* suse-kabi &ndash; a wrapper used by RPM when building SUSE kernels

## Installation

Ready-to-install packages for (open)SUSE distributions are available in [the Kernel:tools
project][kernel_tools] in the openSUSE Build Service.

To build the project locally, install a Rust toolchain and run `cargo build`.

## License

This project is released under the terms of [the GPLv2 License](COPYING).

[genksyms]: https://github.com/torvalds/linux/tree/master/scripts/genksyms
[ksymtypes_1]: https://suse.github.io/suse-kabi-tools/ksymtypes.1.html
[ksymtypes_5]: https://suse.github.io/suse-kabi-tools/ksymtypes.5.html
[kernel_tools]: https://build.opensuse.org/package/show/Kernel:tools/suse-kabi-tools
0707010000000B000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002800000000suse-kabi-tools-0.2.0+git12.d8ab89d/doc0707010000000C000081A400000000000000000000000167CF095300000D0B000000000000000000000000000000000000003400000000suse-kabi-tools-0.2.0+git12.d8ab89d/doc/ksymtypes.1.\" Copyright (C) 2024-2025 SUSE LLC <petr.pavlu@suse.com>
.\" SPDX-License-Identifier: GPL-2.0-or-later
.TH KSYMTYPES 1
.SH NAME
ksymtypes \- a tool to work with Linux\-kernel symtypes files
.SH SYNOPSIS
\fBksymtypes\fR [\fIGENERAL\-OPTION\fR...] {\fBconsolidate\fR | \fBcompare\fR } [\fICOMMAND\-OPTION\fR...]
.SH DESCRIPTION
\fBksymtypes\fR is a tool that provides functionality to work with symtypes files. These files
describe the Application Binary Interface (ABI) of the kernel and its modules. The data is produced
by \fBgenksyms\fR utilities from the kernel tree.
.PP
The tool primarily operates with sets of symtypes files as they are produced during a single build
of the Linux kernel. Each such set describes the ABI of a specific kernel and its modules. The tool
refers to this set as a "symtypes corpus".
.PP
The provided functionality is split into several integrated commands. The currently available
commands are \fBconsolidate\fR and \fBcompare\fR. The \fBconsolidate\fR command takes a symtypes
corpus composed of a set of symtypes files and produces its consolidated variant by merging
duplicated types. The \fBcompare\fR command shows differences between two symtypes corpuses.
.SH GENERAL OPTIONS
.TP
\fB\-d\fR, \fB\-\-debug\fR
Enable debug output.
.TP
\fB\-h\fR, \fB\-\-help\fR
Display global help information and exit.
.TP
\fB\-\-version\fR
Output version information and exit.
.SH CONSOLIDATE COMMAND
\fBksymtypes\fR \fBconsolidate\fR [\fICONSOLIDATE\-OPTION\fR...] \fIPATH\fR
.PP
The \fBconsolidate\fR command reads symtypes files from the path specified on the command line,
consolidates their contents by merging duplicate types and writes the output to the specified file.
The input path should point to a directory that the command recursively searches for all symtypes
files. In a typical use case, this will be a build directory of the Linux kernel.
.PP
Available options:
.TP
\fB\-h\fR, \fB\-\-help\fR
Display help information for the command and exit.
.TP
\fB\-j\fR \fINUM\fR, \fB\-\-jobs\fR=\fINUM\fR
Use \fINUM\fR workers to perform the operation simultaneously.
.TP
\fB\-o\fR \fIFILE\fR, \fB\-\-output\fR=\fIFILE\fR
Write the result in \fIFILE\fR, instead of the standard output.
.SH COMPARE COMMAND
\fBksymtypes\fR \fBcompare\fR [\fICOMPARE\-OPTION\fR...] \fIPATH\fR \fIPATH2\fR
.PP
The \fBcompare\fR command shows differences between two symtypes corpuses. A corpus can be specified
by a directory containing symtypes files or by a consolidated symtypes file. In a typical use case,
the first input will point to a reference consolidated symtypes corpus and the second input will
point to data from a new build of the Linux kernel.
.PP
Available options:
.TP
\fB\-h\fR, \fB\-\-help\fR
Display help information for the command and exit.
.TP
\fB\-j\fR \fINUM\fR, \fB\-\-jobs\fR=\fINUM\fR
Use \fINUM\fR workers to perform the operation simultaneously.
.SH EXAMPLES
Build the Linux kernel and create a reference consolidated symtypes corpus:
.IP
.EX
$ cd <linux\-kernel\-directory>
$ make O=build ...
$ ksymtypes consolidate \-\-output=reference.kabi build/
.EE
.PP
Build a new version of the Linux kernel and compare its ABI with the previous reference:
.IP
.EX
$ cd <linux\-kernel\-directory> && git pull
$ make O=build ...
$ ksymtypes compare reference.kabi build/
.EE
.SH SEE ALSO
\fBksymtypes\fR(5)
0707010000000D000081A400000000000000000000000167CF0953000010E5000000000000000000000000000000000000003400000000suse-kabi-tools-0.2.0+git12.d8ab89d/doc/ksymtypes.5.\" Copyright (C) 2024-2025 SUSE LLC <petr.pavlu@suse.com>
.\" SPDX-License-Identifier: GPL-2.0-or-later
.TH KSYMTYPES 5
.SH NAME
symtypes \- Linux-kernel ABI definition database
.SH DESCRIPTION
Symtypes files provide information about Application Binary Interface (ABI) in the Linux kernel. The
\fBksymtypes\fR utility recognizes two variants of this format, the base and its own consolidated
variant.
.PP
The base format describes exported functions, variables and their dependent types as known in a
single object file. The data is generated by \fBgenksyms\fR utilities from the kernel tree.
.PP
The consolidated format extends the base format to efficiently describe types across multiple object
files. This allows to have one file for the entire kernel ABI. The format is generated by the
\fBksymtypes\fR \fBconsolidate\fR command.
.SH BASE FORMAT
A symtypes file consists of type records, one per a line. Each record is comprised of a type
identifier and an associated type description, separated by a whitespace.
.PP
A type identifier can be one of the following:
.RS
.IP \[bu] 2
<exported-name> \(en an exported function or variable definition (no prefix),
.IP \[bu] 2
t#<typedef-name> \(en a typedef definition,
.IP \[bu] 2
e#<enum-name> \(en an enumeration definition,
.IP \[bu] 2
s#<struct-name> \(en a structure definition,
.IP \[bu] 2
u#<union-name> \(en a union definition,
.IP \[bu] 2
E#<enum-constant-name> \(en an enumerator definition.
.PP
.RE
A type description consists of a list of tokens, separated by a whitespace. A single token can be
a literal value directly contributing to the type definition or a type reference.
References are in the form "<x>#<type-name>" and point to another type defined in the file.
.PP
A type name can be optionally enclosed in single quotes, both when when definiting the type and when
referencing it. This allows the type name to contain spaces.
.PP
.SH CONSOLIDATED FORMAT
The consolidated format extends the base format with variant suffixes and file records.
.PP
Each type definition can have additionally a suffix in the form "@<variant>". This allows to have
different definitions of the same type in the consolidated file.
.PP
A file record is identified by "F#<file-name>". Its description lists types and exports found in
a given file. The types must include their variant if multiple definitions of a specific type are
present in the consolidated file.
A type that has only one variant in the entire consolidated file can be omitted on the file record
to save space. Its presence can be implicitly determined
by recursively walking all exports in the specific file.
.PP
Type references found in other records do not use the variant suffix. An actual type must be
determined based on the context in what file the reference is made.
.PP
.SH EXAMPLES
The following example shows two files \fIa.symtypes\fR and \fI.b.symtypes\fR using the base format.
The first file \fIa.symtypes\fR records an export of the function "baz" that takes as its parameters
the structure "foo" and a pointer to the union "bar", with both types having a full definition. The
second file \fIb.symtypes\fR records an export of the function "qux" that takes as its parameters
the structure "foo" and a pointer to the union "bar", with the former having a full definition and
the latter being an opaque declaration.
.IP
.EX
$ cat example/a.symtypes
s#foo struct foo { int m ; }
u#bar union bar { int i; float f; }
baz void baz ( s#foo a1 , u#bar * a2 )
.EE
.PP
.IP
.EX
$ cat example/b.symtypes
s#foo struct foo { int m ; }
u#bar union bar { UNKNOWN }
qux void qux ( s#foo a1 , u#bar * a2 )
.EE
.PP
The following example shows file \fIc.symtypes\fR that is produced by consolidating the previous two
files \fIa.symtypes\fR and \fIb.symtypes\fR. The structure type "foo" which was same in both file is
merged, the union type "bar" appears in two different variants. New "F#" records indicate which
types are specific to each file.
.IP
.EX
$ ksymtypes consolidate --output=example/c.kabi example/
$ cat example/c.kabi
s#foo struct foo { int m ; }
u#bar@0 union bar { int i; float f; }
u#bar@1 union bar { UNKNOWN }
baz void baz ( s#foo a1 , u#bar * a2 )
qux void qux ( s#foo a1 , u#bar * a2 )
F#example/a.symtypes u#bar@0 baz
F#example/b.symtypes u#bar@1 qux
.EE
.SH SEE ALSO
\fBksymtypes\fR(1)
0707010000000E000081ED00000000000000000000000167CF095300001287000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/kabi.pl#!/usr/bin/perl
use strict;
use warnings;

use Getopt::Long qw(:config no_ignore_case);
use Data::Dumper;

# ( { sym => regexp, mod => regexp, fail => 0/1 }, ... )
my @rules;
my ($opt_verbose, $opt_rules);

# if Module.symvers also lists namespaces (>=5.4)
my $use_namespaces;

# Load kABI severity rules from the given file into the global @rules.
#
# Each non-comment line has the form "<glob pattern> <PASS|FAIL>". Depending on
# its shape, the pattern is matched against the module path (contains '/' or is
# "vmlinux"), the export namespace (all-uppercase, only honored when the
# symvers format carries namespaces), or the symbol name. Exits with an error
# when the file contains nothing but unusable lines.
sub load_rules {
	my $file = shift;
	my $errors = 0;

	xopen(my $fh, '<', $file);
	while (<$fh>) {
		chomp;
		s/#.*//;
		next if /^\s*$/;
		# Strip leading whitespace so split() does not produce an empty
		# first field for indented lines.
		s/^\s+//;
		my ($pattern, $verdict) = split(/\s+/);
		# A lone pattern without a verdict used to trigger an
		# uninitialized-value warning; report it explicitly instead.
		if (!defined($verdict)) {
			print STDERR "$file:$.: missing verdict, must be either PASS or FAIL.\n";
			$errors++;
			next;
		}
		my $new = {};
		if (uc($verdict) eq "PASS") {
			$new->{fail} = 0;
		} elsif (uc($verdict) eq "FAIL") {
			$new->{fail} = 1;
		} else {
			print STDERR "$file:$.: invalid verdict \"$verdict\", must be either PASS or FAIL.\n";
			$errors++;
			next;
		}
		# Simple glob -> regexp conversion. NOTE(review): other regexp
		# metacharacters (e.g. '.') are deliberately left unescaped, so
		# "foo.ko" also matches "fooXko" -- confirm this is acceptable
		# for the rule files in use.
		$pattern =~ s/\*/.*/g;
		$pattern =~ s/\?/./g;
		$pattern =~ s/.*/^$&\$/;

		# If it matches a module path or vmlinux
		if ($pattern =~ /\/|^vmlinux$/) {
			$new->{mod} = $pattern;
		# If it's not a path and the string is all uppercase, assume it's a namespace
		} elsif ($use_namespaces &&
			$pattern !~ /\// && $pattern eq uc($pattern)) {
			$new->{namespace} = $pattern;
		} else {
			$new->{sym} = $pattern;
		}
		push(@rules, $new);
	}
	if ($errors && !@rules) {
		print STDERR "error: only garbage found in $file.\n";
		exit 1;
	}
	close($fh);
}

# Return 1 if the file uses the new (>=5.4) Module.symvers format that carries
# a namespace column, 0 otherwise.
#
# The new (>=5.4) format has 5 tab-separated fields (4 tabs):
#
#    crc\tsymbol\tmodule\texport_type\tnamespace
#
# while the older format only has 4 fields (3 tabs):
#
#    crc\tsymbol\tmodule\texport_type
#
# The two are told apart by counting the tabs on the first line.
sub symvers_uses_namespaces {
	my $file = shift;

	xopen(my $fh, '<', $file);
	my $first = <$fh>;
	chomp $first;

	my $num_tabs = ($first =~ tr/\t//);
	return $num_tabs > 3 ? 1 : 0;
}

# Load a Module.symvers file and return a hash mapping each symbol name to a
# hashref with its crc, module, export type, and (when the format has one) the
# namespace. Malformed lines are reported and skipped; an entirely empty
# result aborts the program.
sub load_symvers {
	my $file = shift;
	my %symbols;

	xopen(my $fh, '<', $file);
	while (my $line = <$fh>) {
		chomp $line;
		my @fields = split(/\t/, $line, -1);
		if (@fields < 4) {
			print STDERR "$file:$.: unknown line\n";
			next;
		}
		my %entry = (crc => $fields[0], mod => $fields[2], type => $fields[3]);
		# The namespace column only exists in the new (>=5.4) format.
		$entry{namespace} = $fields[4] if $use_namespaces;
		$symbols{$fields[1]} = \%entry;
	}
	if (!%symbols) {
		print STDERR "error: no symvers found in $file.\n";
		exit 1;
	}
	close($fh);
	return %symbols;
}

# Each bit represents a restriction of the export; a restriction that is
# present in the new type but absent from the old one fails the check.
my $type_GPL    = 0x1;
my $type_NOW    = 0x2;
my $type_UNUSED = 0x4;
my %types = (
	EXPORT_SYMBOL            => 0x0,
	EXPORT_SYMBOL_GPL        => $type_GPL | $type_NOW,
	EXPORT_SYMBOL_GPL_FUTURE => $type_GPL,
	EXPORT_UNUSED_SYMBOL     => $type_UNUSED,
	EXPORT_UNUSED_SYMBOL_GPL => $type_UNUSED | $type_GPL | $type_NOW
);

# Return true when changing an export from $old to $new adds no restriction
# bit; aborts on an export type that is not in %types.
sub type_compatible {
	my ($old, $new) = @_;

	my @unknown = grep { !exists($types{$_}) } ($old, $new);
	if (@unknown) {
		print STDERR "error: unrecognized export type $unknown[0].\n";
		exit 1;
	}
	# Fail if $new has a bit set that $old does not.
	return !(~$types{$old} & $types{$new});
}

# Count of non-tolerated kABI changes seen so far; checked at the end of the script.
my $kabi_errors = 0;
# Report a kABI change for symbol $sym, described by its old symvers entry
# $symvers. The first matching rule in @rules decides whether the change is
# fatal (counted in $kabi_errors) or tolerated; with no matching rule the
# change is fatal. Tolerated changes are only printed in verbose mode.
sub kabi_change {
	my ($sym, $symvers, $message) = @_;
	my $fail = 1;

	# First match wins: a rule can target the module path, the symbol name,
	# or (when the symvers format has them) the export namespace.
	for my $rule (@rules) {
		if ($rule->{mod} && $symvers->{mod} =~ $rule->{mod} ||
		    $rule->{sym} && $sym =~ $rule->{sym} ||
			($use_namespaces && $rule->{namespace} &&
				$symvers->{namespace} =~ $rule->{namespace})) {
			$fail = $rule->{fail};
			last;
		}
	}
	# Tolerated changes are silent unless --verbose was given.
	return unless $fail or $opt_verbose;

	print STDERR "KABI: symbol $sym(mod:$symvers->{mod}";
	if ($use_namespaces && $symvers->{namespace}) {
		print STDERR " ns:$symvers->{namespace}";
	}
	print STDERR ") $message";
	if ($fail) {
		$kabi_errors++;
		print STDERR "\n";
	} else {
		print STDERR " (tolerated)\n";
	}
}

# Open a file via the 3-argument open(), dying with the target name and the
# system error message on failure. Arguments mirror open(): handle, mode, expr.
sub xopen {
	my $ok = open($_[0], $_[1], @_[2..$#_]);
	$ok or die "$_[2]: $!\n";
}

# Parse the command-line options; two positional Module.symvers paths are required.
my $res = GetOptions(
	'verbose|v' => \$opt_verbose,
	'rules|r=s' => \$opt_rules,
);
if (!$res || @ARGV != 2) {
	print STDERR "Usage: $0 [--rules <rules file>] Module.symvers.old Module.symvers\n";
	exit 1;
}

# Determine symvers format
# (the old file decides whether namespace rules and fields are honored).
$use_namespaces = symvers_uses_namespaces($ARGV[0]);

if (defined($opt_rules)) {
	load_rules($opt_rules);
}
# Load both symbol tables, keyed by symbol name.
my %old = load_symvers($ARGV[0]);
my %new = load_symvers($ARGV[1]);

# Report symbols that were lost, changed CRC, or gained export restrictions.
for my $sym (sort keys(%old)) {
	if (!$new{$sym}) {
		kabi_change($sym, $old{$sym}, "lost");
	} elsif ($old{$sym}->{crc} ne $new{$sym}->{crc}) {
		kabi_change($sym, $old{$sym}, "changed crc from " .
			"$old{$sym}->{crc} to $new{$sym}->{crc}");
	} elsif (!type_compatible($old{$sym}->{type}, $new{$sym}->{type})) {
		kabi_change($sym, $old{$sym}, "changed type from " .
			"$old{$sym}->{type} to $new{$sym}->{type}");
	}
}
# Any non-tolerated change recorded by kabi_change() aborts with failure.
if ($kabi_errors) {
	print STDERR "KABI: aborting due to kabi changes.\n";
	exit 1;
}
exit 0;
0707010000000F000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/package07070100000010000081A400000000000000000000000167CF09530000069F000000000000000000000000000000000000004100000000suse-kabi-tools-0.2.0+git12.d8ab89d/package/suse-kabi-tools.spec#
# spec file for package suse-kabi-tools
#
# Copyright (c) 2025 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.

# Please submit bugfixes or comments via https://bugs.opensuse.org/
#


Name:           suse-kabi-tools
Version:        0
Release:        0
Summary:        A set of ABI tools for the Linux kernel
Group:          System/Kernel
License:        GPL-2.0-or-later
URL:            https://github.com/SUSE/suse-kabi-tools
Source:         %{name}-%{version}.tar.zst
BuildRequires:  cargo
BuildRequires:  cargo-packaging
Requires:       perl

%description
suse-kabi-tools is a set of Application Binary Interface (ABI) tools for the
Linux kernel.

%prep
%autosetup -p1

%build
%{cargo_build}

%install
%{cargo_install}
install -D -m 0644 %{_builddir}/%{name}-%{version}/doc/ksymtypes.1 %{buildroot}%{_mandir}/man1/ksymtypes.1
install -D -m 0644 %{_builddir}/%{name}-%{version}/doc/ksymtypes.5 %{buildroot}%{_mandir}/man5/ksymtypes.5
install -D -m 0755 kabi.pl %{buildroot}/%{_bindir}/suse-kabi

%check
%{cargo_test}

%files
%license COPYING
%{_bindir}/ksymtypes
%{_bindir}/suse-kabi
%{_mandir}/man1/ksymtypes.1%{?ext_man}
%{_mandir}/man5/ksymtypes.5%{?ext_man}

%changelog

07070100000011000041ED00000000000000000000000567CF095300000000000000000000000000000000000000000000002800000000suse-kabi-tools-0.2.0+git12.d8ab89d/src07070100000012000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/bin07070100000013000081A400000000000000000000000167CF095300002C22000000000000000000000000000000000000003900000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/bin/ksymtypes.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use std::path::Path;
use std::time::Instant;
use std::{env, io, process};
use suse_kabi_tools::sym::SymCorpus;
use suse_kabi_tools::{debug, init_debug_level};

/// An elapsed timer to measure time of some operation.
///
/// The time is measured between when the object is instantiated and when it is dropped. A message
/// with the elapsed time is output when the object is dropped.
enum Timing {
    Active { desc: String, start: Instant },
    Inactive,
}

impl Timing {
    /// Creates a timer: an active one recording the current instant when `do_timing` is set,
    /// otherwise an inactive no-op.
    fn new(do_timing: bool, desc: &str) -> Self {
        match do_timing {
            true => Timing::Active {
                desc: String::from(desc),
                start: Instant::now(),
            },
            false => Timing::Inactive,
        }
    }
}

impl Drop for Timing {
    /// Reports the elapsed time on standard error for an active timer; inactive timers drop
    /// silently.
    fn drop(&mut self) {
        if let Timing::Active { desc, start } = self {
            eprintln!("{}: {:.3?}", desc, start.elapsed());
        }
    }
}

/// Prints the global usage message on the standard output.
///
/// Lists every global option that `main()` accepts, including `--timing`, which was previously
/// handled but undocumented.
fn print_usage() {
    print!(concat!(
        "Usage: ksymtypes [OPTION...] COMMAND\n",
        "\n",
        "Options:\n",
        "  -d, --debug                   enable debug output\n",
        "  -h, --help                    display this help and exit\n",
        "  --timing                      report the time taken by each operation\n",
        "  --version                     output version information and exit\n",
        "\n",
        "Commands:\n",
        "  consolidate                   consolidate symtypes into a single file\n",
        "  compare                       show differences between two symtypes corpuses\n",
    ));
}

/// Prints the version information on the standard output.
fn print_version() {
    let version = env!("CARGO_PKG_VERSION");
    println!("ksymtypes {}", version);
}

/// Prints the usage message for the `consolidate` command on the standard output.
fn print_consolidate_usage() {
    let usage = concat!(
        "Usage: ksymtypes consolidate [OPTION...] PATH\n",
        "Consolidate symtypes into a single file.\n",
        "\n",
        "Options:\n",
        "  -h, --help                    display this help and exit\n",
        "  -j NUM, --jobs=NUM            use NUM workers to perform the operation\n",
        "  -o FILE, --output=FILE        write the result in FILE, instead of stdout\n",
    );
    print!("{}", usage);
}

/// Prints the usage message for the `compare` command on the standard output.
fn print_compare_usage() {
    let usage = concat!(
        "Usage: ksymtypes compare [OPTION...] PATH PATH2\n",
        "Show differences between two symtypes corpuses.\n",
        "\n",
        "Options:\n",
        "  -h, --help                    display this help and exit\n",
        "  -j NUM, --jobs=NUM            use NUM workers to perform the operation\n",
    );
    print!("{}", usage);
}

/// Handles an option with a mandatory value.
///
/// Returns `Ok(Some(value))` when `arg` matches the `short` or `long` form of the option (with the
/// value either attached or taken from the next argument), `Ok(None)` when `arg` does not match,
/// and `Err(())` when the option is recognized but its value is missing.
fn handle_value_option<I: Iterator<Item = String>>(
    arg: &str,
    args: &mut I,
    short: &str,
    long: &str,
) -> Result<Option<String>, ()> {
    // '-<short> <value>' and '--<long> <value>': the value is the next argument.
    if arg == short || arg == long {
        return match args.next() {
            Some(value) => Ok(Some(value)),
            None => {
                eprintln!("Missing argument for '{}'", long);
                Err(())
            }
        };
    }

    // '-<short><value>': the value is glued directly to the short option.
    if let Some(value) = arg.strip_prefix(short) {
        return Ok(Some(value.to_string()));
    }

    // '--<long>=<value>': the value follows an equals sign.
    match arg.strip_prefix(long).and_then(|rem| rem.strip_prefix('=')) {
        Some(value) => Ok(Some(value.to_string())),
        None => Ok(None),
    }
}

/// Handles the `-j`/`--jobs` option which specifies the number of workers to perform a given
/// operation simultaneously.
///
/// Returns `Ok(Some(jobs))` for a valid positive count, `Ok(None)` when `arg` is not the jobs
/// option, and `Err(())` for a missing, non-numeric, or non-positive value.
fn handle_jobs_option<I: Iterator<Item = String>>(
    arg: &str,
    args: &mut I,
) -> Result<Option<i32>, ()> {
    let value = match handle_value_option(arg, args, "-j", "--jobs")? {
        Some(value) => value,
        None => return Ok(None),
    };

    match value.parse::<i32>() {
        Ok(jobs) if jobs >= 1 => Ok(Some(jobs)),
        Ok(_) => {
            eprintln!("Invalid value for '{}': must be positive", arg);
            Err(())
        }
        Err(err) => {
            eprintln!("Invalid value for '{}': {}", arg, err);
            Err(())
        }
    }
}

/// Handles the `consolidate` command which consolidates symtypes into a single file.
///
/// Parses the command-specific options, loads the symtypes corpus from the single positional PATH
/// argument, and writes the consolidated result. Returns `Ok(())` on success, or `Err(())` after
/// printing a diagnostic on standard error.
fn do_consolidate<I: IntoIterator<Item = String>>(do_timing: bool, args: I) -> Result<(), ()> {
    // Parse specific command options.
    let mut args = args.into_iter();
    // Output target; defaults to "-", which per the usage text stands for stdout.
    let mut output = "-".to_string();
    let mut num_workers = 1;
    // Set once "--" is seen; everything after it is treated as positional.
    let mut past_dash_dash = false;
    let mut maybe_path = None;

    while let Some(arg) = args.next() {
        if !past_dash_dash {
            // Value-carrying options consume their value from `args` themselves.
            if let Some(value) = handle_value_option(&arg, &mut args, "-o", "--output")? {
                output = value;
                continue;
            }
            if let Some(value) = handle_jobs_option(&arg, &mut args)? {
                num_workers = value;
                continue;
            }
            if arg == "-h" || arg == "--help" {
                print_consolidate_usage();
                return Ok(());
            }
            if arg == "--" {
                past_dash_dash = true;
                continue;
            }
            // Any other dash-prefixed argument is an unknown option.
            if arg.starts_with('-') || arg.starts_with("--") {
                eprintln!("Unrecognized consolidate option '{}'", arg);
                return Err(());
            }
        }

        // Exactly one positional argument (the source path) is accepted.
        if maybe_path.is_none() {
            maybe_path = Some(arg);
            continue;
        }
        eprintln!("Excess consolidate argument '{}' specified", arg);
        return Err(());
    }

    let path = maybe_path.ok_or_else(|| {
        eprintln!("The consolidate source is missing");
    })?;

    // Do the consolidation.
    let mut syms = SymCorpus::new();

    {
        // Timing scope: reports the load duration when --timing is in effect.
        let _timing = Timing::new(do_timing, &format!("Reading symtypes from '{}'", path));

        if let Err(err) = syms.load(&path, num_workers) {
            eprintln!("Failed to read symtypes from '{}': {}", path, err);
            return Err(());
        }
    }

    {
        // Timing scope: reports the write duration when --timing is in effect.
        let _timing = Timing::new(
            do_timing,
            &format!("Writing consolidated symtypes to '{}'", output),
        );

        if let Err(err) = syms.write_consolidated(&output) {
            eprintln!(
                "Failed to write consolidated symtypes to '{}': {}",
                output, err
            );
            return Err(());
        }
    }

    Ok(())
}

/// Handles the `compare` command which shows differences between two symtypes corpuses.
///
/// Parses the command-specific options, loads the two corpuses given as positional arguments, and
/// writes their differences to standard output. Returns `Ok(())` on success, or `Err(())` after
/// printing a diagnostic on standard error.
fn do_compare<I: IntoIterator<Item = String>>(do_timing: bool, args: I) -> Result<(), ()> {
    // Parse specific command options.
    let mut args = args.into_iter();
    let mut num_workers = 1;
    // Set once "--" is seen; everything after it is treated as positional.
    let mut past_dash_dash = false;
    let mut maybe_path = None;
    let mut maybe_path2 = None;

    while let Some(arg) = args.next() {
        if !past_dash_dash {
            if let Some(value) = handle_jobs_option(&arg, &mut args)? {
                num_workers = value;
                continue;
            }
            if arg == "-h" || arg == "--help" {
                print_compare_usage();
                return Ok(());
            }
            if arg == "--" {
                past_dash_dash = true;
                continue;
            }
            // Any other dash-prefixed argument is an unknown option.
            if arg.starts_with('-') || arg.starts_with("--") {
                eprintln!("Unrecognized compare option '{}'", arg);
                return Err(());
            }
        }

        // Exactly two positional arguments (the compared paths) are accepted.
        if maybe_path.is_none() {
            maybe_path = Some(arg);
            continue;
        }
        if maybe_path2.is_none() {
            maybe_path2 = Some(arg);
            continue;
        }
        eprintln!("Excess compare argument '{}' specified", arg);
        return Err(());
    }

    let path = maybe_path.ok_or_else(|| {
        eprintln!("The first compare source is missing");
    })?;
    let path2 = maybe_path2.ok_or_else(|| {
        eprintln!("The second compare source is missing");
    })?;

    // Do the comparison.
    debug!("Compare '{}' and '{}'", path, path2);

    // Load the first corpus, timing the operation when requested.
    let syms = {
        let _timing = Timing::new(do_timing, &format!("Reading symtypes from '{}'", path));

        let mut syms = SymCorpus::new();
        if let Err(err) = syms.load(&path, num_workers) {
            eprintln!("Failed to read symtypes from '{}': {}", path, err);
            return Err(());
        }
        syms
    };

    // Load the second corpus the same way.
    let syms2 = {
        let _timing = Timing::new(do_timing, &format!("Reading symtypes from '{}'", path2));

        let mut syms2 = SymCorpus::new();
        if let Err(err) = syms2.load(&path2, num_workers) {
            eprintln!("Failed to read symtypes from '{}': {}", path2, err);
            return Err(());
        }
        syms2
    };

    {
        // Run the comparison itself, writing the differences to stdout.
        let _timing = Timing::new(do_timing, "Comparison");

        if let Err(err) = syms.compare_with(&syms2, io::stdout(), num_workers) {
            eprintln!(
                "Failed to compare symtypes from '{}' and '{}': {}",
                path, path2, err
            );
            return Err(());
        }
    }

    Ok(())
}

/// Program entry point: parses global options, dispatches to the selected command, and maps the
/// result onto the process exit code (0 on success, 1 on any failure).
fn main() {
    let mut args = env::args();

    // Skip over the program name.
    match args.next() {
        Some(_) => {}
        None => {
            eprintln!("Unknown program name");
            process::exit(1);
        }
    };

    // Handle global options and stop at the command.
    let mut maybe_command = None;
    let mut do_timing = false;
    let mut debug_level = 0;
    for arg in args.by_ref() {
        // Each '-d'/'--debug' occurrence raises the debug verbosity by one.
        if arg == "-d" || arg == "--debug" {
            debug_level += 1;
            continue;
        }
        // '--timing' makes the commands report how long their operations take.
        if arg == "--timing" {
            do_timing = true;
            continue;
        }

        if arg == "-h" || arg == "--help" {
            print_usage();
            process::exit(0);
        }
        if arg == "--version" {
            print_version();
            process::exit(0);
        }
        if arg.starts_with('-') || arg.starts_with("--") {
            eprintln!("Unrecognized global option '{}'", arg);
            process::exit(1);
        }
        // The first non-option argument is the command; the rest of `args` is
        // handed to the command handler untouched.
        maybe_command = Some(arg);
        break;
    }

    init_debug_level(debug_level);

    let command = match maybe_command {
        Some(command) => command,
        None => {
            eprintln!("No command specified");
            process::exit(1);
        }
    };

    // Process the specified command.
    let result = match command.as_str() {
        "consolidate" => do_consolidate(do_timing, args),
        "compare" => do_compare(do_timing, args),
        _ => {
            eprintln!("Unrecognized command '{}'", command);
            Err(())
        }
    };

    process::exit(if result.is_ok() { 0 } else { 1 });
}
07070100000014000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002D00000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/diff07070100000015000081A400000000000000000000000167CF0953000022C6000000000000000000000000000000000000003400000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/diff/mod.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use crate::MapIOErr;
use std::fmt::Display;
use std::io::{prelude::*, BufWriter};
use std::ops::{Index, IndexMut};

#[cfg(test)]
mod tests;

// Implementation of the Myers diff algorithm:
// Myers, E.W. An O(ND) difference algorithm and its variations. Algorithmica 1, 251--266 (1986).
// https://doi.org/10.1007/BF01840446

/// A step in the edit script.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Edit {
    /// Keep the element at the given index in `a` (a match between the inputs).
    KeepA(usize),
    /// Remove the element at the given index in `a`.
    RemoveA(usize),
    /// Insert the element at the given index in `b`.
    InsertB(usize),
}

/// An edit script which describes how to transform `a` to `b`.
type EditScript = Vec<Edit>;

/// A limited [`Vec`] wrapper which allows indexing by `isize` in range
/// `(-self.0.len() / 2)..((self.0.len() + 1) / 2`) instead of `0..self.0.len()`.
struct IVec<T>(Vec<T>);

impl<T> Index<isize> for IVec<T> {
    type Output = T;

    /// Maps the signed index onto the underlying vector, with index 0 at the middle element.
    fn index(&self, index: isize) -> &T {
        &self.0[(self.0.len() / 2).wrapping_add_signed(index)]
    }
}

impl<T> IndexMut<isize> for IVec<T> {
    /// Mutable counterpart of [`Index::index`], using the same middle-anchored mapping.
    fn index_mut(&mut self, index: isize) -> &mut T {
        let middle = self.0.len() / 2;
        &mut self.0[middle.wrapping_add_signed(index)]
    }
}

/// An edit step + an identifier of the previous steps leading to the current point during the edit
/// graph traversal.
#[derive(Clone, Copy)]
struct EditChain {
    /// Index of the previous link in the chain arena (`usize::MAX` marks the chain start).
    prev: usize,
    /// The edit performed at this link.
    step: Edit,
}

/// A state of a diagonal during the edit graph traversal.
#[derive(Clone)]
struct DiagonalState {
    /// The furthest `x` coordinate reached on this diagonal.
    x: usize,
    /// Index into the edit-chain arena of the last step taken on this diagonal.
    edit_index: usize,
}

/// Compares `a` with `b` and returns an edit script describing how to transform the former to the
/// latter.
///
/// Implements the greedy Myers algorithm: `d` counts the non-diagonal steps taken so far and
/// `k = x - y` indexes the diagonals stored in `v`. Each diagonal keeps an `edit_index` into
/// `edit_chains`, an append-only arena of back-linked steps that is unwound into the final script
/// once the end of both inputs is reached.
fn myers<T: AsRef<str> + PartialEq>(a: &[T], b: &[T]) -> EditScript {
    // Upper bound on the script length: remove all of `a`, insert all of `b`.
    let max = a.len() + b.len();
    let mut v = IVec(vec![
        DiagonalState {
            x: usize::MAX,
            edit_index: usize::MAX,
        };
        // Minimum of 3 diagonals to allow accessing `v[1].x` when the inputs are empty.
        std::cmp::max(2 * max + 1, 3)
    ]);
    // Seed diagonal 1 so the first iteration (d=0, k=0) starts the traversal at (0,0).
    v[1].x = 0;
    let mut edit_chains = Vec::new();

    for d in 0..(max as isize + 1) {
        // Only every other diagonal is reachable for a given `d`.
        for k in (-d..d + 1).step_by(2) {
            // Determine where to progress, insert from `b` or remove from `a`.
            let insert_b = k == -d || (k != d && v[k - 1].x < v[k + 1].x);
            let (mut x, mut edit_index) = if insert_b {
                (v[k + 1].x, v[k + 1].edit_index)
            } else {
                (v[k - 1].x + 1, v[k - 1].edit_index)
            };
            // `y` follows from the diagonal identity k = x - y.
            let mut y = x.wrapping_add_signed(-k);

            // Record the step in the edit script. Skip the first step in the algorithm which
            // initially brings the traversal to (0,0).
            if d != 0 {
                edit_chains.push(EditChain {
                    prev: edit_index,
                    step: if insert_b {
                        Edit::InsertB(y - 1)
                    } else {
                        Edit::RemoveA(x - 1)
                    },
                });
                edit_index = edit_chains.len() - 1;
            }

            // Look for a snake: a run of matching elements extends the path diagonally for free.
            while x < a.len() && y < b.len() && a[x] == b[y] {
                (x, y) = (x + 1, y + 1);
                edit_chains.push(EditChain {
                    prev: edit_index,
                    step: Edit::KeepA(x - 1),
                });
                edit_index = edit_chains.len() - 1;
            }

            // Check if the end is reached or more steps are needed.
            if x >= a.len() && y >= b.len() {
                // Traverse the edit chain and turn it into a proper edit script.
                let mut edit_script = EditScript::new();
                while edit_index != usize::MAX {
                    let edit_chain = edit_chains[edit_index];
                    edit_script.push(edit_chain.step);
                    edit_index = edit_chain.prev;
                }
                // The chain is back-linked, so the collected steps are in reverse order.
                edit_script.reverse();
                return edit_script;
            }
            v[k] = DiagonalState { x, edit_index };
        }
    }
    // `d` reaching `max` always terminates inside the loop above.
    unreachable!();
}

/// Writes a single diff hunk to the provided output stream.
///
/// `hunk_pos_a`/`hunk_len_a` and `hunk_pos_b`/`hunk_len_b` are the position and length values for
/// the `@@ -a,b +c,d @@` header; `hunk_data` holds the already-formatted hunk lines.
fn write_hunk<W: Write>(
    hunk_pos_a: usize,
    hunk_len_a: usize,
    hunk_pos_b: usize,
    hunk_len_b: usize,
    hunk_data: &[String],
    writer: &mut BufWriter<W>,
) -> Result<(), crate::Error> {
    let err_desc = "Failed to write a diff hunk";

    // Emit the hunk header, then every prepared line.
    let header = format!(
        "@@ -{},{} +{},{} @@",
        hunk_pos_a, hunk_len_a, hunk_pos_b, hunk_len_b
    );
    writeln!(writer, "{}", header).map_io_err(err_desc)?;
    for line in hunk_data.iter() {
        writeln!(writer, "{}", line).map_io_err(err_desc)?;
    }
    Ok(())
}

/// Compares `a` with `b` and writes their unified diff to the provided output stream.
///
/// Runs the Myers diff and converts the resulting edit script into unified-format hunks with up to
/// 3 lines of context on each side. NOTE(review): positions start at 1 even for an empty input
/// side, so a hunk against empty content is reported as `-1,0`/`+1,0` rather than the `-0,0`
/// convention some diff tools use -- confirm consumers accept this.
pub fn unified<T: AsRef<str> + PartialEq + Display, W: Write>(
    a: &[T],
    b: &[T],
    writer: W,
) -> Result<(), crate::Error> {
    let mut writer = BufWriter::new(writer);

    // Diff the two inputs and calculate the edit script.
    let edit_script = myers(a, b);

    // Turn the edit script into hunks in the unified format.
    const CONTEXT_SIZE: usize = 3;
    // Half-open range [context_begin, context_end) of pending unchanged lines in `a`.
    let (mut context_begin, mut context_end) = (0, 0);
    // 1-based current line positions in `a` and `b`.
    let (mut pos_a, mut pos_b) = (1, 1);
    let (mut hunk_pos_a, mut hunk_len_a, mut hunk_pos_b, mut hunk_len_b) = (0, 0, 0, 0);
    // Formatted lines of the hunk currently being built; empty means no open hunk.
    let mut hunk_data = Vec::new();

    for edit in edit_script {
        match edit {
            Edit::KeepA(index_a) => {
                // Start recording a new context, or extend the current one.
                if context_begin == context_end {
                    context_begin = index_a;
                    context_end = context_begin + 1;
                } else {
                    context_end += 1;
                }

                // Update the positions.
                pos_a += 1;
                pos_b += 1;

                // If handling a hunk, check if it should be closed off. More than twice the
                // context size of unchanged lines means the trailing and the next leading
                // context cannot overlap.
                if !hunk_data.is_empty() && context_end - context_begin > 2 * CONTEXT_SIZE {
                    for line in a.iter().skip(context_begin).take(CONTEXT_SIZE) {
                        hunk_data.push(format!(" {}", line));
                    }
                    hunk_len_a += CONTEXT_SIZE;
                    hunk_len_b += CONTEXT_SIZE;
                    context_begin += CONTEXT_SIZE;
                    write_hunk(
                        hunk_pos_a,
                        hunk_len_a,
                        hunk_pos_b,
                        hunk_len_b,
                        &hunk_data,
                        &mut writer,
                    )?;
                    hunk_data.clear();
                }
            }

            Edit::RemoveA(_) | Edit::InsertB(_) => {
                // Open a new hunk if not already handling one.
                if hunk_data.is_empty() {
                    // Keep at most CONTEXT_SIZE lines of leading context.
                    if context_end - context_begin > CONTEXT_SIZE {
                        context_begin = context_end - CONTEXT_SIZE;
                    }
                    hunk_pos_a = pos_a - (context_end - context_begin);
                    hunk_len_a = 0;
                    hunk_pos_b = pos_b - (context_end - context_begin);
                    hunk_len_b = 0;
                }

                // Update the positions.
                if let Edit::RemoveA(_) = edit {
                    pos_a += 1;
                } else {
                    pos_b += 1;
                }

                // Add any accumulated context.
                for line in a.iter().take(context_end).skip(context_begin) {
                    hunk_data.push(format!(" {}", line));
                }
                hunk_len_a += context_end - context_begin;
                hunk_len_b += context_end - context_begin;
                context_begin = context_end;

                // Record the removed/added string.
                if let Edit::RemoveA(index_a) = edit {
                    hunk_data.push(format!("-{}", a[index_a]));
                    hunk_len_a += 1;
                } else if let Edit::InsertB(index_b) = edit {
                    hunk_data.push(format!("+{}", b[index_b]));
                    hunk_len_b += 1;
                }
            }
        }
    }

    // Close off the last hunk, if one is open.
    if !hunk_data.is_empty() {
        // Emit at most CONTEXT_SIZE lines of trailing context.
        if context_end - context_begin > CONTEXT_SIZE {
            context_end = context_begin + CONTEXT_SIZE;
        }
        for line in a.iter().take(context_end).skip(context_begin) {
            hunk_data.push(format!(" {}", line));
        }
        hunk_len_a += context_end - context_begin;
        hunk_len_b += context_end - context_begin;
        write_hunk(
            hunk_pos_a,
            hunk_len_a,
            hunk_pos_b,
            hunk_len_b,
            &hunk_data,
            &mut writer,
        )?;
    }

    Ok(())
}
07070100000016000081A400000000000000000000000167CF095300001058000000000000000000000000000000000000003600000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/diff/tests.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use super::*;

#[test]
fn diff_trivial_empty() {
    // Both inputs empty: the edit script must be empty as well.
    let left: [&str; 0] = [];
    let right = [];
    let script = myers(&left, &right);
    assert_eq!(script, []);
}

#[test]
fn diff_trivial_replace() {
    // A single differing element requires a remove followed by an insert.
    let left = ["X"];
    let right = ["Y"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::RemoveA(0), Edit::InsertB(0)]);
}

#[test]
fn diff_trivial_insert() {
    // Empty `a`: a single insert from `b` is the whole script.
    let left = [];
    let right = ["X"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::InsertB(0)]);
}

#[test]
fn diff_trivial_remove() {
    // Empty `b`: a single remove from `a` is the whole script.
    let left = ["X"];
    let right = [];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::RemoveA(0)]);
}

#[test]
fn diff_trivial_keep() {
    // Identical single-element inputs: one keep operation suffices.
    let left = ["X"];
    let right = ["X"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::KeepA(0)]);
}

#[test]
fn diff_insert_front() {
    // An insert is needed at the front of `a`.
    let left = ["X", "Y"];
    let right = ["W", "X", "Y"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::InsertB(0), Edit::KeepA(0), Edit::KeepA(1)]);
}

#[test]
fn diff_insert_middle() {
    // An insert is needed in the middle of `a`.
    let left = ["X", "Z"];
    let right = ["X", "Y", "Z"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::KeepA(0), Edit::InsertB(1), Edit::KeepA(1)]);
}

#[test]
fn diff_insert_end() {
    // An insert is needed at the end of `a`.
    let left = ["X", "Y"];
    let right = ["X", "Y", "Z"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::KeepA(0), Edit::KeepA(1), Edit::InsertB(2)]);
}

#[test]
fn diff_insert_subsequent() {
    // Subsequent inserts are needed when `a` is empty.
    let left = [];
    let right = ["X", "Y", "Z"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::InsertB(0), Edit::InsertB(1), Edit::InsertB(2)]);
}

#[test]
fn diff_remove_front() {
    // A remove is needed from the front of `a`.
    let left = ["W", "X", "Y"];
    let right = ["X", "Y"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::RemoveA(0), Edit::KeepA(1), Edit::KeepA(2)]);
}

#[test]
fn diff_remove_middle() {
    // A remove is needed from the middle of `a`.
    let left = ["X", "Y", "Z"];
    let right = ["X", "Z"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::KeepA(0), Edit::RemoveA(1), Edit::KeepA(2)]);
}

#[test]
fn diff_remove_end() {
    // A remove is needed from the end of `a`.
    let left = ["X", "Y", "Z"];
    let right = ["X", "Y"];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::KeepA(0), Edit::KeepA(1), Edit::RemoveA(2)]);
}

#[test]
fn diff_remove_subsequent() {
    // Subsequent removes are needed when `b` is empty.
    let left = ["X", "Y", "Z"];
    let right = [];
    let script = myers(&left, &right);
    assert_eq!(script, [Edit::RemoveA(0), Edit::RemoveA(1), Edit::RemoveA(2)]);
}

#[test]
fn diff_keep_subsequent() {
    // Subsequent keep operations from `a` are interleaved with an insert and a remove.
    let left = ["X", "Y", "Z"];
    let right = ["W", "X", "Y"];
    let script = myers(&left, &right);
    let expected = [
        Edit::InsertB(0),
        Edit::KeepA(0),
        Edit::KeepA(1),
        Edit::RemoveA(2),
    ];
    assert_eq!(script, expected);
}
07070100000017000081A400000000000000000000000167CF095300000FBF000000000000000000000000000000000000002F00000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/lib.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf};

pub mod diff;
pub mod sym;

/// An error type for the crate, annotating standard errors with contextual information and
/// providing custom errors.
#[derive(Debug)]
pub enum Error {
    /// An I/O failure, pairing the underlying [`std::io::Error`] with a contextual description.
    IO {
        desc: String,
        io_err: std::io::Error,
    },
    /// A parse failure, described by a plain message.
    Parse(String),
}

impl Error {
    /// Creates a new `Error::IO`.
    fn new_io(desc: &str, io_err: std::io::Error) -> Self {
        Error::IO {
            desc: desc.to_string(),
            io_err,
        }
    }

    /// Creates a new `Error::Parse`.
    fn new_parse(desc: &str) -> Self {
        Error::Parse(desc.to_string())
    }
}

// The default trait methods suffice; `Display` and `Debug` implementations are provided separately.
impl std::error::Error for Error {}

impl std::fmt::Display for Error {
    /// Formats the error, prefixing a wrapped I/O error with its contextual description.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::IO { desc, io_err } => write!(f, "{}: {}", desc, io_err),
            Self::Parse(desc) => write!(f, "{}", desc),
        }
    }
}

/// A helper extension trait to map [`std::io::Error`] to [`crate::Error`], as
/// `write!(data).map_io_error(context)`.
trait MapIOErr {
    /// Converts an I/O error into a [`crate::Error::IO`] annotated with `desc`.
    fn map_io_err(self, desc: &str) -> Result<(), crate::Error>;
}

impl MapIOErr for Result<(), std::io::Error> {
    fn map_io_err(self, desc: &str) -> Result<(), crate::Error> {
        self.map_err(|err| crate::Error::new_io(desc, err))
    }
}

/// A [`std::fs::File`] wrapper that tracks the file path to provide better error context.
struct PathFile {
    /// The path the file was opened or created with; used in error messages.
    path: PathBuf,
    /// The underlying open file handle.
    file: File,
}

impl PathFile {
    /// Opens an existing file for reading, remembering its path.
    pub fn open<P: AsRef<Path>>(path: P) -> io::Result<Self> {
        let path_buf = path.as_ref().to_path_buf();
        let file = File::open(path)?;
        Ok(Self {
            path: path_buf,
            file,
        })
    }

    /// Creates a file for writing, remembering its path.
    pub fn create<P: AsRef<Path>>(path: P) -> io::Result<Self> {
        let path_buf = path.as_ref().to_path_buf();
        let file = File::create(path)?;
        Ok(Self {
            path: path_buf,
            file,
        })
    }
}

impl Read for PathFile {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.file.read(buf).map_err(|err| {
            io::Error::other(Error::new_io(
                &format!("Failed to read data from file '{}'", self.path.display()),
                err,
            ))
        })
    }
}

impl Write for PathFile {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.file.write(buf).map_err(|err| {
            io::Error::other(Error::new_io(
                &format!("Failed to write data to file '{}'", self.path.display()),
                err,
            ))
        })
    }

    fn flush(&mut self) -> io::Result<()> {
        self.file.flush().map_err(|err| {
            io::Error::other(Error::new_io(
                &format!("Failed to flush data to file '{}'", self.path.display()),
                err,
            ))
        })
    }
}

/// Global debugging level.
pub static DEBUG_LEVEL: std::sync::OnceLock<usize> = std::sync::OnceLock::new();

/// Initializes the global debugging level, can be called only once.
///
/// Panics if the level was already initialized. The original `assert!` followed by
/// `get_or_init()` was a check-then-act pair that could race between threads; `OnceLock::set`
/// performs the once-only initialization atomically and reports a second call as an error.
pub fn init_debug_level(level: usize) {
    DEBUG_LEVEL
        .set(level)
        .expect("the debug level is already initialized");
}

/// Prints a formatted message to the standard error if debugging is enabled.
#[macro_export]
macro_rules! debug {
    ($($arg:tt)*) => {
        // An uninitialized DEBUG_LEVEL is treated as level 0, i.e. debugging disabled.
        if *$crate::DEBUG_LEVEL.get().unwrap_or(&0) > 0 {
            eprintln!($($arg)*);
        }
    }
}

/// Asserts that the value is [`Ok(())`](Ok), indicating success.
#[cfg(any(test, doc))]
#[macro_export]
macro_rules! assert_ok {
    ($result:expr) => {
        match $result {
            Ok(()) => {}
            // Bind the non-Ok value so it can be included in the panic message.
            result => panic!("assertion failed: {:?} is not of type Ok(())", result),
        }
    };
}

/// Creates a [`Vec`] of [`String`] from a list of string literals.
///
/// Only compiled for tests and documentation builds.
#[cfg(any(test, doc))]
#[macro_export]
macro_rules! string_vec {
    ($($x:expr),* $(,)?) => {
        vec![$($x.to_string()),*]
    };
}
07070100000018000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000002C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/sym07070100000019000081A400000000000000000000000167CF09530000A6F0000000000000000000000000000000000000003300000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/sym/mod.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use crate::{debug, MapIOErr, PathFile};
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{HashMap, HashSet};
use std::io::{prelude::*, BufReader, BufWriter};
use std::iter::zip;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Mutex, RwLock};
use std::{fs, io, thread};

#[cfg(test)]
mod tests;
#[cfg(test)]
mod tests_format;

// Notes:
// [1] The module uses several HashMaps that are indexed by Strings. Rust allows looking up an
//     entry in such a HashMap using only a &str. Unfortunately, stable Rust (1.84) currently
//     offers no way to perform that lookup and, when the key is missing, insert it as a String in
//     a single step. Depending on the specific case and what is likely to produce less overhead,
//     the code either turns the key into a String up front for the first lookup, or runs the
//     search again when the key is missing and needs inserting.
// [2] HashSet in the stable Rust (1.84) doesn't provide the entry functionality. It is
//     a nightly-only experimental API and so not used by the module.

/// A token used in the description of a type.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd)]
enum Token {
    /// A reference to another type by its name, e.g. `s#foo`.
    TypeRef(String),
    /// A plain word in the type description, e.g. `struct` or `{`.
    Atom(String),
}

impl Token {
    /// Creates a new `Token::TypeRef`.
    fn new_typeref<S: Into<String>>(name: S) -> Self {
        Token::TypeRef(name.into())
    }

    /// Creates a new `Token::Atom`.
    fn new_atom<S: Into<String>>(name: S) -> Self {
        Token::Atom(name.into())
    }

    /// Returns the token data as a string slice.
    fn as_str(&self) -> &str {
        match self {
            Self::TypeRef(ref_name) => ref_name.as_str(),
            Self::Atom(word) => word.as_str(),
        }
    }
}

/// A sequence of tokens, describing one type.
type Tokens = Vec<Token>;

/// A collection of all variants of the same type name in a given corpus.
type TypeVariants = Vec<Tokens>;

/// A mapping from a type name to all its known variants.
type Types = HashMap<String, TypeVariants>;

/// A mapping from a symbol name to an index in `SymFiles`, specifying in which file the symbol is
/// defined.
type Exports = HashMap<String, usize>;

/// A mapping from a type name to an index in `TypeVariants`, specifying its variant in a given
/// file.
type FileRecords = HashMap<String, usize>;

/// A representation of a single `.symtypes` file.
struct SymFile {
    /// The path of the file, as recorded at load time.
    path: PathBuf,
    /// The types present in the file, each mapped to its variant index.
    records: FileRecords,
}

/// A collection of `.symtypes` files.
type SymFiles = Vec<SymFile>;

/// A representation of a kernel ABI, loaded from `.symtypes` files.
///
/// * The `types` collection stores all types and their variants.
/// * The `files` collection records types in individual `.symtypes` files. Each type uses an index
///   to reference its variant in `types`.
/// * The `exports` collection provides all exports in the corpus. Each export uses an index to
///   reference its origin in `files`.
///
/// For instance, consider the following corpus consisting of two files `test_a.symtypes` and
/// `test_b.symtypes`:
///
/// * `test_a.symtypes`:
///
///   ```text
///   s#foo struct foo { int a ; }
///   bar int bar ( s#foo )
///   ```
///
/// * `test_b.symtypes`:
///
///   ```text
///   s#foo struct foo { UNKNOWN }
///   baz int baz ( s#foo )
///   ```
///
/// The corpus has two exports `bar` and `baz`, with each referencing structure `foo`, but with
/// different definitions, one is complete and one is incomplete.
///
/// The data would be represented as follows:
///
/// ```text
/// SymCorpus {
///     types: Types {
///         "s#foo": TypeVariants[
///             Tokens[Atom("struct"), Atom("foo"), Atom("{"), Atom("int"), Atom("a"), Atom(";"), Atom("}")],
///             Tokens[Atom("struct"), Atom("foo"), Atom("{"), Atom("UNKNOWN"), Atom("}")],
///         ],
///         "bar": TypeVariants[
///             Tokens[Atom("int"), Atom("bar"), Atom("("), TypeRef("s#foo"), Atom(")")],
///         ],
///         "baz": TypeVariants[
///             Tokens[Atom("int"), Atom("baz"), Atom("("), TypeRef("s#foo"), Atom(")")],
///         ],
///     },
///     exports: Exports {
///         "bar": 0,
///         "baz": 1,
///     },
///     files: SymFiles[
///         SymFile {
///             path: PathBuf("test_a.symtypes"),
///             records: FileRecords {
///                 "s#foo": 0,
///                 "bar": 0,
///             }
///         },
///         SymFile {
///             path: PathBuf("test_b.symtypes"),
///             records: FileRecords {
///                 "s#foo": 1,
///                 "baz": 0,
///             }
///         },
///     ],
/// }
/// ```
///
/// Note importantly that if a `Token` in `TypeVariants` is a `TypeRef` then the reference only
/// specifies a name of the target type, e.g. `s#foo` above. The actual type variant must be
/// determined based on what file is being processed. This makes it possible to trivially merge `Tokens` and
/// limit memory needed to store the corpus. On the other hand, when comparing two `Tokens` vectors
/// for ABI equality, the code needs to consider whether all referenced subtypes are actually equal
/// as well.
#[derive(Default)]
pub struct SymCorpus {
    /// All types in the corpus, with all their known variants.
    types: Types,
    /// All exported symbols, each referencing its defining file by an index into `files`.
    exports: Exports,
    /// All loaded `.symtypes` files.
    files: SymFiles,
}

/// A helper struct to provide synchronized access to `SymCorpus` data during parallel loading.
struct LoadContext<'a> {
    /// The corpus types, behind a reader-writer lock.
    types: RwLock<&'a mut Types>,
    /// The corpus exports.
    exports: Mutex<&'a mut Exports>,
    /// The corpus files.
    files: Mutex<&'a mut SymFiles>,
}

/// Type names to be present in the consolidated output, along with a mapping from their internal
/// symbol variant indices to the output variant indices.
type ConsolidateOutputTypes<'a> = HashMap<&'a str, HashMap<usize, usize>>;

/// Type names processed during consolidation for a specific file, providing the output variant
/// index for each type.
type ConsolidateFileTypes<'a> = HashMap<&'a str, usize>;

/// Changes between two corpuses, recording a tuple of each modified type's name, its old tokens and
/// its new tokens, along with a [`Vec`] of exported symbols affected by the change.
type CompareChangedTypes<'a> = HashMap<(&'a str, &'a Tokens, &'a Tokens), Vec<&'a str>>;

/// Type names processed during comparison for a specific file.
type CompareFileTypes<'a> = HashSet<&'a str>;

impl SymCorpus {
    /// Creates a new empty corpus.
    pub fn new() -> Self {
        Self {
            types: Types::new(),
            exports: Exports::new(),
            files: SymFiles::new(),
        }
    }

    /// Loads symtypes data from a given location.
    ///
    /// The `path` can point to a single `.symtypes` file or a directory. In the latter case, the
    /// function recursively collects all `.symtypes` in that directory and loads them.
    pub fn load<P: AsRef<Path>>(&mut self, path: P, num_workers: i32) -> Result<(), crate::Error> {
        let path = path.as_ref();

        // Determine if the input is a directory tree or a single symtypes file.
        let md = fs::metadata(path).map_err(|err| {
            crate::Error::new_io(&format!("Failed to query path '{}'", path.display()), err)
        })?;

        if !md.is_dir() {
            // Load the single file.
            return self.load_symfiles("", &[path], num_workers);
        }

        // Recursively collect symtypes files within the directory, then load them all.
        let mut symfiles = Vec::new();
        Self::collect_symfiles(path, "", &mut symfiles)?;
        self.load_symfiles(path, &symfiles, num_workers)
    }

    /// Collects recursively all `.symtypes` files under the given root path and its subpath.
    fn collect_symfiles<P: AsRef<Path>, Q: AsRef<Path>>(
        root: P,
        sub_path: Q,
        symfiles: &mut Vec<PathBuf>,
    ) -> Result<(), crate::Error> {
        let root = root.as_ref();
        let sub_path = sub_path.as_ref();
        let path = root.join(sub_path);

        let dir_iter = fs::read_dir(&path).map_err(|err| {
            crate::Error::new_io(
                &format!("Failed to read directory '{}'", path.display()),
                err,
            )
        })?;

        for maybe_entry in dir_iter {
            let entry = maybe_entry.map_err(|err| {
                crate::Error::new_io(
                    &format!("Failed to read directory '{}'", path.display()),
                    err,
                )
            })?;

            let entry_path = entry.path();

            let md = fs::symlink_metadata(&entry_path).map_err(|err| {
                crate::Error::new_io(
                    &format!("Failed to query path '{}'", entry_path.display()),
                    err,
                )
            })?;

            // Never follow symbolic links.
            if md.is_symlink() {
                continue;
            }

            let entry_sub_path = sub_path.join(entry.file_name());

            if md.is_dir() {
                // Descend into the subdirectory.
                Self::collect_symfiles(root, &entry_sub_path, symfiles)?;
            } else if entry_sub_path
                .extension()
                .map_or(false, |ext| ext == "symtypes")
            {
                symfiles.push(entry_sub_path);
            }
        }
        Ok(())
    }

    /// Loads all specified `.symtypes` files.
    ///
    /// Reads the given files under `root` using up to `num_workers` threads and merges their
    /// contents into the corpus. Returns the first error encountered by any worker, if any.
    fn load_symfiles<P: AsRef<Path>, Q: AsRef<Path> + Sync>(
        &mut self,
        root: P,
        symfiles: &[Q],
        num_workers: i32,
    ) -> Result<(), crate::Error> {
        let root = root.as_ref();

        // Load data from the files. Workers claim files to process by atomically incrementing
        // this shared index.
        let next_work_idx = AtomicUsize::new(0);

        let load_context = LoadContext {
            types: RwLock::new(&mut self.types),
            exports: Mutex::new(&mut self.exports),
            files: Mutex::new(&mut self.files),
        };

        // Scoped threads allow the workers to borrow load_context, symfiles and next_work_idx.
        thread::scope(|s| {
            let mut workers = Vec::new();
            for _ in 0..num_workers {
                workers.push(s.spawn(|| -> Result<(), crate::Error> {
                    loop {
                        // Claim the next unprocessed file, stop when none is left.
                        let work_idx = next_work_idx.fetch_add(1, Ordering::Relaxed);
                        if work_idx >= symfiles.len() {
                            return Ok(());
                        }
                        let sub_path = &symfiles[work_idx].as_ref();

                        let path = root.join(sub_path);
                        let file = PathFile::open(&path).map_err(|err| {
                            crate::Error::new_io(
                                &format!("Failed to open file '{}'", path.display()),
                                err,
                            )
                        })?;

                        Self::load_inner(sub_path, file, &load_context)?;
                    }
                }));
            }

            // Join all worker threads. Return the first error if any is found, others are silently
            // swallowed which is ok.
            for worker in workers {
                worker.join().unwrap()?
            }

            Ok(())
        })
    }

    /// Loads symtypes data from a specified reader.
    ///
    /// The `path` should point to a `.symtypes` file name, indicating the origin of the data.
    pub fn load_buffer<P: AsRef<Path>, R: Read>(
        &mut self,
        path: P,
        reader: R,
    ) -> Result<(), crate::Error> {
        // Wrap the corpus collections in a single-use load context and parse the data.
        let load_context = LoadContext {
            types: RwLock::new(&mut self.types),
            exports: Mutex::new(&mut self.exports),
            files: Mutex::new(&mut self.files),
        };
        Self::load_inner(path, reader, &load_context)
    }

    /// Loads symtypes data from a specified reader.
    ///
    /// This is the common parsing path for both plain and consolidated `.symtypes` inputs. Parsed
    /// types, exports and file records are merged into the shared `load_context`.
    fn load_inner<P: AsRef<Path>, R: Read>(
        path: P,
        reader: R,
        load_context: &LoadContext,
    ) -> Result<(), crate::Error> {
        let path = path.as_ref();
        debug!("Loading '{}'", path.display());

        let mut records = FileRecords::new();

        // Map each variant name/index that the type has in this specific .symtypes file to one
        // which it got assigned in the entire loaded corpus.
        let mut remap: HashMap<String, HashMap<String, usize>> = HashMap::new();

        // Read all content from the file.
        let lines = match read_lines(reader) {
            Ok(lines) => lines,
            Err(err) => return Err(crate::Error::new_io("Failed to read symtypes data", err)),
        };

        // Detect whether the input is a single or consolidated symtypes file. Only consolidated
        // files contain F# (file) declarations.
        let mut is_consolidated = false;
        for line in &lines {
            if line.starts_with("F#") {
                is_consolidated = true;
                break;
            }
        }

        let file_idx = if !is_consolidated {
            // Record the file early to determine its file_idx.
            let symfile = SymFile {
                path: path.to_path_buf(),
                records: FileRecords::new(),
            };

            let mut files = load_context.files.lock().unwrap();
            files.push(symfile);
            files.len() - 1
        } else {
            // A consolidated input describes multiple files; this sentinel is never used because
            // per-file indices get assigned when the F# declarations are processed below.
            usize::MAX
        };

        // Track names of all entries to detect duplicates.
        let mut all_names = HashSet::new();

        // Parse all declarations.
        let mut file_indices = Vec::new();
        for (line_idx, line) in lines.iter().enumerate() {
            // Obtain a name of the record.
            let mut words = line.split_ascii_whitespace();
            let name = words.next().ok_or_else(|| {
                crate::Error::new_parse(&format!(
                    "{}:{}: Expected a record name",
                    path.display(),
                    line_idx + 1
                ))
            })?;

            // Check if the record is a duplicate of another one.
            match all_names.get(name) {
                Some(_) => {
                    return Err(crate::Error::new_parse(&format!(
                        "{}:{}: Duplicate record '{}'",
                        path.display(),
                        line_idx + 1,
                        name,
                    )))
                }
                None => all_names.insert(name.to_string()),
            };

            // Check for a file declaration and remember its index. File declarations are processed
            // later after remapping of all symbol variants is known.
            if name.starts_with("F#") {
                file_indices.push(line_idx);
                continue;
            }

            // Handle a type/export record.

            // Turn the remaining words into tokens.
            let tokens = words_into_tokens(&mut words);

            // Parse the base name and any variant name/index, which is appended as a suffix after
            // the `@` character.
            let (base_name, orig_variant_name) = if is_consolidated {
                split_type_name(name)
            } else {
                // Plain files carry no variant suffix; use an empty slice as the variant name.
                (name, &name[name.len()..])
            };

            // Insert the type into the corpus.
            let variant_idx = Self::merge_type(base_name, tokens, load_context);

            if is_consolidated {
                // Record a mapping from the original variant name/index to the new one.
                remap
                    .entry(base_name.to_string()) // [1]
                    .or_default()
                    .insert(orig_variant_name.to_string(), variant_idx);
            } else {
                // Insert the record.
                records.insert(base_name.to_string(), variant_idx);
                Self::try_insert_export(base_name, file_idx, line_idx, load_context)?;
            }
        }

        // TODO Validate all references?

        if !is_consolidated {
            // Update the file records.
            let mut files = load_context.files.lock().unwrap();
            files[file_idx].records = records;
            return Ok(());
        }

        // Consolidated file needs more work.

        // Handle file declarations.
        for line_idx in file_indices {
            let mut words = lines[line_idx].split_ascii_whitespace();

            // The first word is the F#-prefixed file name; its presence was verified above.
            let record_name = words.next().unwrap();
            assert!(record_name.starts_with("F#"));
            let file_name = &record_name[2..];

            // Register the declared file and obtain its index.
            let file_idx = {
                let symfile = SymFile {
                    path: Path::new(file_name).to_path_buf(),
                    records: FileRecords::new(),
                };
                let mut files = load_context.files.lock().unwrap();
                files.push(symfile);
                files.len() - 1
            };

            let mut records = FileRecords::new();
            for type_name in words {
                // Parse the base name and variant name/index.
                let (base_name, orig_variant_name) = split_type_name(type_name);

                // Look up how the variant got remapped.
                let variant_idx = *remap
                    .get(base_name)
                    .and_then(|hash| hash.get(orig_variant_name))
                    .ok_or_else(|| {
                        crate::Error::new_parse(&format!(
                            "{}:{}: Type '{}' is not known",
                            path.display(),
                            line_idx + 1,
                            type_name
                        ))
                    })?;

                // Insert the record.
                records.insert(base_name.to_string(), variant_idx);
                Self::try_insert_export(base_name, file_idx, line_idx, load_context)?;
            }

            // Add implicit references, ones that were omitted by the F# declaration because only
            // one variant exists in the entire consolidated file.
            let walk_records: Vec<_> = records.iter().map(|(k, v)| (k.clone(), *v)).collect();
            for (name, variant_idx) in walk_records {
                let types = load_context.types.read().unwrap();
                Self::extrapolate_file_record(
                    path,
                    file_name,
                    &name,
                    variant_idx,
                    true,
                    *types,
                    &mut records,
                )?;
            }

            let mut files = load_context.files.lock().unwrap();
            files[file_idx].records = records;
        }

        Ok(())
    }

    /// Adds the given type definition to the corpus if not already present, and returns its variant
    /// index.
    fn merge_type(type_name: &str, tokens: Tokens, load_context: &LoadContext) -> usize {
        let mut types = load_context.types.write().unwrap();
        match types.get_mut(type_name) {
            Some(variants) => match variants.iter().position(|variant| *variant == tokens) {
                // An identical variant already exists, reuse its index.
                Some(idx) => idx,
                // A new variant of an already-known type.
                None => {
                    variants.push(tokens);
                    variants.len() - 1
                }
            },
            // The first variant of a new type. See note [1] about the String key.
            None => {
                types.insert(type_name.to_string(), vec![tokens]);
                0
            }
        }
    }

    /// Checks if a specified `type_name` is an export and, if so, registers it with its `file_idx`
    /// in the `load_context.exports`.
    fn try_insert_export(
        type_name: &str,
        file_idx: usize,
        line_idx: usize,
        load_context: &LoadContext,
    ) -> Result<(), crate::Error> {
        if !is_export_name(type_name) {
            return Ok(());
        }

        // Try to add the export; remember the previous owner if it is a duplicate. The exports
        // lock is released at the end of this scope, before the files lock is taken below.
        let other_file_idx;
        {
            let mut exports = load_context.exports.lock().unwrap();
            match exports.entry(type_name.to_string()) // [1]
            {
                Vacant(export_entry) => {
                    export_entry.insert(file_idx);
                    return Ok(());
                }
                Occupied(export_entry) => other_file_idx = *export_entry.get(),
            }
        }

        // Report the duplicate, naming both the current and the previous file.
        let files = load_context.files.lock().unwrap();
        let path = &files[file_idx].path;
        let other_path = &files[other_file_idx].path;
        Err(crate::Error::new_parse(&format!(
            "{}:{}: Export '{}' is duplicate. Previous occurrence found in '{}'.",
            path.display(),
            line_idx + 1,
            type_name,
            other_path.display()
        )))
    }

    /// Processes a single symbol in some file originated from an `F#` record and enhances the
    /// specified file records with the needed implicit types.
    ///
    /// This function is used when reading a consolidated input file and processing its `F#`
    /// records. Each `F#` record is in form `F#<filename> <type@variant>... <export>...`. It lists
    /// all types and exports in a given file but is allowed to omit any referenced types which have
    /// only one variant in the whole consolidated file. The purpose of this function is to find all
    /// such implicit references and add them to `records`.
    ///
    /// A caller of this function should pre-fill `records` with all explicit references given on
    /// the processed `F#` record and then call this function on each of the references. These root
    /// calls should be invoked with `is_explicit` set to `true`. The function then recursively adds
    /// all needed implicit types which are referenced from these roots.
    fn extrapolate_file_record(
        corpus_path: &Path,
        file_name: &str,
        name: &str,
        variant_idx: usize,
        is_explicit: bool,
        types: &Types,
        records: &mut FileRecords,
    ) -> Result<(), crate::Error> {
        if is_explicit {
            // All explicit symbols need to be added by the caller.
            assert!(records.get(name).is_some());
        } else {
            // A symbol can be implicit only if it has one variant.
            assert!(variant_idx == 0);

            // Stop if the symbol was already processed, otherwise mark it as seen. See note [1]
            // about the String key.
            if records.get(name).is_some() {
                return Ok(());
            }
            records.insert(name.to_string(), variant_idx);
        }

        // Obtain tokens for the selected variant and check it is correctly specified.
        let variants = types.get(name).unwrap();
        assert!(!variants.is_empty());
        if !is_explicit && variants.len() > 1 {
            return Err(crate::Error::new_parse(&format!(
                "{}: Type '{}' is implicitly referenced by file '{}' but has multiple variants in the corpus",
                corpus_path.display(),
                name,
                file_name,
            )));
        }

        // Recurse into every type referenced by this variant. Passing variant_idx=0 is correct
        // here:
        // * If the type is explicitly specified in the parent F# record then it is already present
        //   in the records and the recursive call returns immediately.
        // * If the type is implicit then it may only have a single variant, so only variant_idx=0
        //   can be right. The recursive call verifies that no more than one variant exists.
        for token in &variants[variant_idx] {
            if let Token::TypeRef(ref_name) = token {
                Self::extrapolate_file_record(
                    corpus_path,
                    file_name,
                    ref_name,
                    0,
                    false,
                    types,
                    records,
                )?;
            }
        }

        Ok(())
    }

    /// Processes a single symbol specified in a given file and adds it to the consolidated output.
    ///
    /// The specified symbol is added to `output_types` and `processed_types`, if not already
    /// present, and all its type references get recursively processed in the same way.
    fn consolidate_type<'a>(
        &'a self,
        symfile: &SymFile,
        name: &'a str,
        output_types: &mut ConsolidateOutputTypes<'a>,
        processed_types: &mut ConsolidateFileTypes<'a>,
    ) {
        // See if the symbol was already processed. The vacant entry is kept around and filled in
        // only once the output variant index is known.
        let processed_entry = match processed_types.entry(name) {
            Occupied(_) => return,
            Vacant(processed_entry) => processed_entry,
        };

        // Look up the internal variant index.
        let variant_idx = match symfile.records.get(name) {
            Some(&variant_idx) => variant_idx,
            None => panic!(
                "Type '{}' is not known in file '{}'",
                name,
                symfile.path.display()
            ),
        };

        // Determine the output variant index for the symbol. Output indices are handed out in the
        // order in which distinct internal variants are first encountered.
        let remap_idx;
        match output_types.entry(name) {
            Occupied(mut active_entry) => {
                let remap = active_entry.get_mut();
                // Read the length before remap.entry() takes a mutable borrow of the map.
                let remap_len = remap.len();
                match remap.entry(variant_idx) {
                    Occupied(remap_entry) => {
                        remap_idx = *remap_entry.get();
                    }
                    Vacant(remap_entry) => {
                        remap_idx = remap_len;
                        remap_entry.insert(remap_idx);
                    }
                }
            }
            Vacant(active_entry) => {
                remap_idx = 0;
                active_entry.insert(HashMap::from([(variant_idx, remap_idx)]));
            }
        };
        processed_entry.insert(remap_idx);

        // Process recursively all types that the symbol references.
        let variants = match self.types.get(name) {
            Some(variants) => variants,
            None => panic!("Type '{}' has a missing declaration", name),
        };

        for token in &variants[variant_idx] {
            match token {
                Token::TypeRef(ref_name) => {
                    self.consolidate_type(symfile, ref_name, output_types, processed_types)
                }
                Token::Atom(_word) => {}
            }
        }
    }

    /// Writes the corpus in the consolidated form into a specified file.
    pub fn write_consolidated<P: AsRef<Path>>(&self, path: P) -> Result<(), crate::Error> {
        let path = path.as_ref();

        // Open the output file.
        let writer: Box<dyn Write> = if path == Path::new("-") {
            Box::new(io::stdout())
        } else {
            match PathFile::create(path) {
                Ok(file) => Box::new(file),
                Err(err) => {
                    return Err(crate::Error::new_io(
                        &format!("Failed to create file '{}'", path.display()),
                        err,
                    ))
                }
            }
        };

        self.write_consolidated_buffer(writer)
    }

    /// Writes the corpus in the consolidated form to the provided output stream.
    ///
    /// The consolidated output contains each needed type declaration once per distinct variant.
    /// Types with multiple variants are written as `<name>@<remap_idx>`, and a trailing `F#` record
    /// per file lists which variants and exports that file uses.
    pub fn write_consolidated_buffer<W: Write>(&self, writer: W) -> Result<(), crate::Error> {
        let mut writer = BufWriter::new(writer);

        // Initialize output data. Variable output_types records all output symbols, file_types
        // provides per-file information.
        let mut output_types = ConsolidateOutputTypes::new();
        let mut file_types = vec![ConsolidateFileTypes::new(); self.files.len()];

        // Sort all files in the corpus by their path.
        let mut file_indices = (0..self.files.len()).collect::<Vec<_>>();
        file_indices.sort_by_key(|&i| &self.files[i].path);

        // Process the sorted files and add their needed types to the output.
        for &i in &file_indices {
            let symfile = &self.files[i];

            // Collect sorted exports in the file which are the roots for consolidation.
            let mut exports = Vec::new();
            for name in symfile.records.keys() {
                if is_export_name(name) {
                    exports.push(name.as_str());
                }
            }
            exports.sort();

            // Add the exported types and their needed types to the output.
            let mut processed_types = ConsolidateFileTypes::new();
            for name in &exports {
                self.consolidate_type(symfile, name, &mut output_types, &mut processed_types);
            }
            file_types[i] = processed_types;
        }

        // Go through all files and their output types. Check if a given type has only one variant
        // in the output and mark it as such.
        // usize::MAX is used as a sentinel meaning "single variant, no @suffix needed".
        for file_types_item in &mut file_types {
            for (name, remap_idx) in file_types_item {
                let remap = output_types.get(name).unwrap();
                if remap.len() == 1 {
                    *remap_idx = usize::MAX;
                }
            }
        }

        // Sort all output types and write them to the specified file.
        // Sorting key (is_export_name, name) puts all `<X>#` type records before plain exports.
        let mut sorted_records = output_types.into_iter().collect::<Vec<_>>();
        sorted_records.sort_by_key(|(name, _remap)| (is_export_name(name), *name));

        let err_desc = "Failed to write a consolidated record";

        for (name, remap) in sorted_records {
            let variants = self.types.get(name).unwrap();
            // Invert the (variant -> remap) mapping and order the output by the remap index.
            let mut sorted_remap = remap
                .iter()
                .map(|(&variant_idx, &remap_idx)| (remap_idx, variant_idx))
                .collect::<Vec<_>>();
            sorted_remap.sort();

            // The `@<remap_idx>` suffix is only needed when more than one variant survives.
            let needs_suffix = sorted_remap.len() > 1;
            for (remap_idx, variant_idx) in sorted_remap {
                let tokens = &variants[variant_idx];

                if needs_suffix {
                    write!(writer, "{}@{}", name, remap_idx).map_io_err(err_desc)?;
                } else {
                    write!(writer, "{}", name).map_io_err(err_desc)?;
                }
                for token in tokens {
                    write!(writer, " {}", token.as_str()).map_io_err(err_desc)?;
                }
                writeln!(writer).map_io_err(err_desc)?;
            }
        }

        // Write file records.
        for &i in &file_indices {
            let symfile = &self.files[i];

            // TODO Sorting, make same as above.
            let mut sorted_types = file_types[i]
                .iter()
                .map(|(&name, &remap_idx)| (is_export_name(name), name, remap_idx))
                .collect::<Vec<_>>();
            sorted_types.sort();

            // Output the F# record in form `F#<filename> <type@variant>... <export>...`. Types with
            // only one variant in the entire consolidated file can be skipped because they can be
            // implicitly determined by a reader.
            write!(writer, "F#{}", symfile.path.display()).map_io_err(err_desc)?;
            for &(_, name, remap_idx) in &sorted_types {
                if remap_idx != usize::MAX {
                    write!(writer, " {}@{}", name, remap_idx).map_io_err(err_desc)?;
                } else if is_export_name(name) {
                    write!(writer, " {}", name).map_io_err(err_desc)?;
                }
            }
            writeln!(writer).map_io_err(err_desc)?;
        }
        Ok(())
    }

    /// Obtains tokens which describe a specified type name, in a given corpus and file.
    ///
    /// Panics if the file does not reference the type, or if the referenced type has no
    /// declaration in the corpus.
    fn get_type_tokens<'a>(symtypes: &'a SymCorpus, file: &SymFile, name: &str) -> &'a Tokens {
        let variant_idx = *file.records.get(name).unwrap_or_else(|| {
            panic!(
                "Type '{}' is not known in file '{}'",
                name,
                file.path.display()
            )
        });
        let variants = symtypes
            .types
            .get(name)
            .unwrap_or_else(|| panic!("Type '{}' has a missing declaration", name));
        &variants[variant_idx]
    }

    /// Compares the definition of the symbol `name` in (`corpus`, `file`) with its definition in
    /// (`other_corpus`, `other_file`).
    ///
    /// If the immediate definition of the symbol differs between the two corpuses then it gets
    /// added in `changes`. The `export` parameter identifies the top-level exported symbol affected
    /// by the change.
    ///
    /// The specified symbol is added to `processed`, if not already present, and all its type
    /// references get recursively processed in the same way.
    fn compare_types<'a>(
        (corpus, file): (&'a SymCorpus, &'a SymFile),
        (other_corpus, other_file): (&'a SymCorpus, &'a SymFile),
        name: &'a str,
        export: &'a str,
        changes: &Mutex<CompareChangedTypes<'a>>,
        processed: &mut CompareFileTypes<'a>,
    ) {
        // Mark the symbol as processed and bail out if it already was. Marking it up front also
        // guards against unbounded recursion on mutually-referencing types.
        if !processed.insert(name) {
            return;
        }

        // Look up how the symbol is defined in each corpus.
        let tokens = Self::get_type_tokens(corpus, file, name);
        let other_tokens = Self::get_type_tokens(other_corpus, other_file, name);

        // Compare the immediate tokens. Vec's PartialEq performs the length check followed by the
        // element-wise comparison.
        let is_equal = tokens == other_tokens;
        if !is_equal {
            let mut changes = changes.lock().unwrap();
            changes
                .entry((name, tokens, other_tokens))
                .or_default()
                .push(export);
        }

        // Compare recursively same referenced types. This can be done trivially if the tokens are
        // equal. If they are not, try hard (and slowly) to find any matching types.
        if is_equal {
            for token in tokens {
                if let Token::TypeRef(ref_name) = token {
                    Self::compare_types(
                        (corpus, file),
                        (other_corpus, other_file),
                        ref_name.as_str(),
                        export,
                        changes,
                        processed,
                    );
                }
            }
        } else {
            for token in tokens {
                if let Token::TypeRef(ref_name) = token {
                    for other_token in other_tokens {
                        if let Token::TypeRef(other_ref_name) = other_token {
                            if ref_name == other_ref_name {
                                Self::compare_types(
                                    (corpus, file),
                                    (other_corpus, other_file),
                                    ref_name.as_str(),
                                    export,
                                    changes,
                                    processed,
                                );
                                break;
                            }
                        }
                    }
                }
            }
        }
    }

    /// Compares symbols in the `self` and `other_corpus`.
    ///
    /// A human-readable report about all found changes is written to the provided output stream.
    /// The per-export type comparison is distributed over `num_workers` threads; the collected
    /// changes are sorted afterwards, so the output is deterministic regardless of the worker
    /// count.
    pub fn compare_with<W: Write>(
        &self,
        other_corpus: &SymCorpus,
        writer: W,
        num_workers: i32,
    ) -> Result<(), crate::Error> {
        let mut writer = BufWriter::new(writer);
        let err_desc = "Failed to write a comparison result";

        // Check for symbols in self but not in other_corpus, and vice versa.
        for (exports_a, exports_b, change) in [
            (&self.exports, &other_corpus.exports, "removed"),
            (&other_corpus.exports, &self.exports, "added"),
        ] {
            for name in exports_a.keys() {
                if !exports_b.contains_key(name) {
                    writeln!(writer, "Export '{}' has been {}", name, change)
                        .map_io_err(err_desc)?;
                }
            }
        }

        // Compare symbols that are in both corpuses.
        // Work items are claimed by the workers through a shared atomic index.
        let works: Vec<_> = self.exports.iter().collect();
        let next_work_idx = AtomicUsize::new(0);

        let changes = Mutex::new(CompareChangedTypes::new());

        thread::scope(|s| {
            for _ in 0..num_workers {
                s.spawn(|| loop {
                    // Claim the next unprocessed export, stop when the queue is exhausted.
                    let work_idx = next_work_idx.fetch_add(1, Ordering::Relaxed);
                    if work_idx >= works.len() {
                        break;
                    }
                    let (name, file_idx) = works[work_idx];

                    let file = &self.files[*file_idx];
                    if let Some(other_file_idx) = other_corpus.exports.get(name) {
                        let other_file = &other_corpus.files[*other_file_idx];
                        let mut processed = CompareFileTypes::new();
                        Self::compare_types(
                            (self, file),
                            (other_corpus, other_file),
                            name,
                            name,
                            &changes,
                            &mut processed,
                        );
                    }
                });
            }
        });

        // Format and output collected changes.
        let changes = changes.into_inner().unwrap(); // Get the inner HashMap.
        let mut changes = changes.into_iter().collect::<Vec<_>>();
        changes.iter_mut().for_each(|(_, exports)| exports.sort());
        changes.sort();

        let mut add_separator = false;
        for ((name, tokens, other_tokens), exports) in changes {
            // Add an empty line to separate individual changes.
            if add_separator {
                writeln!(writer).map_io_err(err_desc)?;
            } else {
                add_separator = true;
            }

            writeln!(
                writer,
                "The following '{}' exports are different:",
                exports.len()
            )
            .map_io_err(err_desc)?;
            for export in exports {
                writeln!(writer, " {}", export).map_io_err(err_desc)?;
            }
            writeln!(writer).map_io_err(err_desc)?;

            writeln!(writer, "because of a changed '{}':", name).map_io_err(err_desc)?;
            write_type_diff(tokens, other_tokens, writer.by_ref())?;
        }

        Ok(())
    }
}

/// Reads data from a specified reader and returns its content as a [`Vec`] of [`String`] lines.
///
/// Returns the first I/O error encountered while reading, if any.
fn read_lines<R: Read>(reader: R) -> io::Result<Vec<String>> {
    // `BufRead::lines()` yields `io::Result<String>` items; collecting into `io::Result<Vec<_>>`
    // short-circuits on the first error, matching the explicit loop it replaces.
    BufReader::new(reader).lines().collect()
}

/// Reads words from a given iterator and converts them to `Tokens`.
///
/// A word whose second character is `#` (e.g. `s#foo`) becomes a type reference; any other word
/// becomes a plain atom.
fn words_into_tokens<'a, I: Iterator<Item = &'a str>>(words: &mut I) -> Tokens {
    words
        .map(|word| {
            if word.chars().nth(1) == Some('#') {
                Token::new_typeref(word)
            } else {
                Token::new_atom(word)
            }
        })
        .collect()
}

/// Returns whether the specified type name is an export definition, as opposed to a `<X>#<foo>`
/// type definition.
///
/// Type definitions have `#` as their second character; names shorter than two characters are
/// always exports.
fn is_export_name(type_name: &str) -> bool {
    type_name.chars().nth(1) != Some('#')
}

/// Splits the specified type name into a tuple of two string slices, with the first one being the
/// base name and the second one containing the variant name/index (or an empty string if no variant
/// was present).
fn split_type_name(type_name: &str) -> (&str, &str) {
    // Split on the last '@' so that names with several '@' characters keep all but the final
    // segment in the base name.
    match type_name.rsplit_once('@') {
        Some((base_name, variant)) => (base_name, variant),
        None => (type_name, ""),
    }
}

/// Processes tokens describing a type and produces its pretty-formatted version as a [`Vec`] of
/// [`String`] lines.
///
/// Opening brackets/parentheses end the current line and increase the indentation, closing ones
/// do the reverse, and `;`/`,` terminate the current line. All other tokens are joined with single
/// spaces. Imbalanced brackets are tolerated: the depth never underflows below zero.
fn pretty_format_type(tokens: &Tokens) -> Vec<String> {
    let mut output = Vec::new();
    let mut depth: usize = 0;
    let mut current = String::new();

    for token in tokens {
        let text = token.as_str();

        // A closing bracket/parenthesis first flushes any pending line and drops one level of
        // indentation, before being emitted on a line of its own below.
        if matches!(text, "}" | ")") {
            if !current.is_empty() {
                output.push(current);
            }
            depth = depth.saturating_sub(1);
            current = String::new();
        }

        // When starting a fresh line, prefix it with the indentation for the current depth.
        let at_line_start = current.is_empty();
        if at_line_start {
            current.push_str(&"\t".repeat(depth));
        }

        match text {
            "{" | "(" => {
                // An opening bracket terminates the line and opens a new indentation level.
                if !at_line_start {
                    current.push(' ');
                }
                current.push_str(text);
                output.push(current);
                current = String::new();
                depth = depth.saturating_add(1);
            }
            "}" | ")" => {
                current.push_str(text);
            }
            ";" | "," => {
                // Separators end the current line.
                current.push_str(text);
                output.push(current);
                current = String::new();
            }
            _ => {
                if !at_line_start {
                    current.push(' ');
                }
                current.push_str(text);
            }
        }
    }

    // Flush any trailing, unterminated line.
    if !current.is_empty() {
        output.push(current);
    }

    output
}

/// Formats a unified diff between two supposedly different types and writes it to the provided
/// output stream.
fn write_type_diff<W: Write>(
    tokens: &Tokens,
    other_tokens: &Tokens,
    writer: W,
) -> Result<(), crate::Error> {
    // Pretty-print both variants and delegate the actual diffing to the shared helper.
    crate::diff::unified(
        &pretty_format_type(tokens),
        &pretty_format_type(other_tokens),
        writer,
    )
}
0707010000001A000081A400000000000000000000000167CF095300002F7B000000000000000000000000000000000000003500000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/sym/tests.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use super::*;
use crate::assert_ok;

/// Asserts that the given result is `Err(crate::Error::Parse(_))` and that the parse error
/// description equals the expected one; panics with a diagnostic message otherwise.
macro_rules! assert_parse_err {
    ($result:expr, $exp_desc:expr) => {
        match $result {
            Err(crate::Error::Parse(actual_desc)) => assert_eq!(actual_desc, $exp_desc),
            result => panic!(
                "assertion failed: {:?} is not of type Err(crate::Error::Parse())",
                result
            ),
        }
    };
}

#[test]
fn read_empty_record() {
    // Check that empty records are rejected when reading a file.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "s#test struct test { }\n",
        "\n",
        "s#test2 struct test2 { }\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:2: Expected a record name"
    );
}

#[test]
fn read_duplicate_type_record() {
    // Check that type records with duplicate names are rejected when reading a file.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "s#test struct test { int a ; }\n",
        "s#test struct test { int b ; }\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:2: Duplicate record 's#test'"
    );
}

#[test]
fn read_duplicate_file_record() {
    // Check that F# records with duplicate names are rejected when reading a consolidated file.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "bar int bar ( )\n",
        "baz int baz ( )\n",
        "F#test.symtypes bar\n",
        "F#test.symtypes baz\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:4: Duplicate record 'F#test.symtypes'"
    );
}

#[test]
fn read_invalid_file_record_ref() {
    // Check that an F# record referencing a type in form '<base_name>' is rejected if the type is
    // not known.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "F#test.symtypes bar\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:1: Type 'bar' is not known"
    );
}

#[test]
fn read_invalid_file_record_ref2() {
    // Check that an F# record referencing a type in form '<base_name>@<variant_idx>' is rejected if
    // the base name is not known.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "F#test.symtypes bar@0\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:1: Type 'bar@0' is not known"
    );
}

#[test]
fn read_invalid_file_record_ref3() {
    // Check that an F# record referencing a type in form '<base_name>@<variant_idx>' is rejected if
    // the variant index is not known.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "bar@0 int bar ( )\n",
        "F#test.symtypes bar@0\n",
        "F#test2.symtypes bar@1\n", //
    );
    assert_parse_err!(
        corpus.load_buffer("test.symtypes", content.as_bytes()),
        "test.symtypes:3: Type 'bar@1' is not known"
    );
}

#[test]
fn read_duplicate_type_export() {
    // Check that two exports with the same name in different files get rejected.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "foo int foo ( )\n", //
    );
    assert_ok!(corpus.load_buffer("test.symtypes", content.as_bytes()));

    let content2 = concat!(
        "foo int foo ( )", //
    );
    assert_parse_err!(
        corpus.load_buffer("test2.symtypes", content2.as_bytes()),
        "test2.symtypes:1: Export 'foo' is duplicate. Previous occurrence found in 'test.symtypes'."
    );
}

#[test]
fn read_write_basic() {
    // Check reading of a single file and writing the consolidated output.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n", //
    );
    assert_ok!(corpus.load_buffer("test.symtypes", content.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus.write_consolidated_buffer(&mut out));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "s#foo struct foo { int a ; }\n",
            "bar int bar ( s#foo )\n",
            "F#test.symtypes bar\n", //
        )
    );
}

#[test]
fn read_write_shared_struct() {
    // Check that a structure declaration shared by two files appears only once in the consolidated
    // output.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n", //
    );
    assert_ok!(corpus.load_buffer("test.symtypes", content.as_bytes()));

    let content2 = concat!(
        "s#foo struct foo { int a ; }\n",
        "baz int baz ( s#foo )\n", //
    );
    assert_ok!(corpus.load_buffer("test2.symtypes", content2.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus.write_consolidated_buffer(&mut out));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "s#foo struct foo { int a ; }\n",
            "bar int bar ( s#foo )\n",
            "baz int baz ( s#foo )\n",
            "F#test.symtypes bar\n",
            "F#test2.symtypes baz\n", //
        )
    );
}

#[test]
fn read_write_differing_struct() {
    // Check that a structure declaration different in two files appears in all variants in the
    // consolidated output and they are correctly referenced by the F# entries.
    let mut corpus = SymCorpus::new();
    let content = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n", //
    );
    assert_ok!(corpus.load_buffer("test.symtypes", content.as_bytes()));

    let content2 = concat!(
        "s#foo struct foo { UNKNOWN }\n",
        "baz int baz ( s#foo )\n", //
    );
    assert_ok!(corpus.load_buffer("test2.symtypes", content2.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus.write_consolidated_buffer(&mut out));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "s#foo@0 struct foo { int a ; }\n",
            "s#foo@1 struct foo { UNKNOWN }\n",
            "bar int bar ( s#foo )\n",
            "baz int baz ( s#foo )\n",
            "F#test.symtypes s#foo@0 bar\n",
            "F#test2.symtypes s#foo@1 baz\n", //
        )
    );
}

#[test]
fn compare_identical() {
    // Check that the comparison of two identical corpuses shows no differences.
    let mut corpus_a = SymCorpus::new();
    let content_a = concat!(
        "bar int bar ( )\n", //
    );
    assert_ok!(corpus_a.load_buffer("a/test.symtypes", content_a.as_bytes()));

    let mut corpus_b = SymCorpus::new();
    let content_b = concat!(
        "bar int bar ( )\n", //
    );
    assert_ok!(corpus_b.load_buffer("b/test.symtypes", content_b.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus_a.compare_with(&corpus_b, &mut out, 1));
    assert_eq!(String::from_utf8(out).unwrap(), "");
}

#[test]
fn compare_added_export() {
    // Check that the comparison of two corpuses reports any newly added export.
    let mut corpus_a = SymCorpus::new();
    let content_a = concat!(
        "bar int bar ( )\n", //
    );
    assert_ok!(corpus_a.load_buffer("a/test.symtypes", content_a.as_bytes()));

    let mut corpus_b = SymCorpus::new();
    let content_b = concat!(
        "bar int bar ( )\n",
        "baz int baz ( )\n", //
    );
    assert_ok!(corpus_b.load_buffer("b/test.symtypes", content_b.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus_a.compare_with(&corpus_b, &mut out, 1));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "Export 'baz' has been added\n", //
        )
    );
}

#[test]
fn compare_removed_export() {
    // Check that the comparison of two corpuses reports any removed export.
    let mut corpus_a = SymCorpus::new();
    let content_a = concat!(
        "bar int bar ( )\n",
        "baz int baz ( )\n", //
    );
    assert_ok!(corpus_a.load_buffer("a/test.symtypes", content_a.as_bytes()));

    let mut corpus_b = SymCorpus::new();
    let content_b = concat!(
        "baz int baz ( )\n", //
    );
    assert_ok!(corpus_b.load_buffer("b/test.symtypes", content_b.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus_a.compare_with(&corpus_b, &mut out, 1));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "Export 'bar' has been removed\n", //
        )
    );
}

#[test]
fn compare_changed_type() {
    // Check that the comparison of two corpuses reports changed types and affected exports.
    let mut corpus_a = SymCorpus::new();
    let content_a = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n", //
    );
    assert_ok!(corpus_a.load_buffer("a/test.symtypes", content_a.as_bytes()));

    let mut corpus_b = SymCorpus::new();
    let content_b = concat!(
        "s#foo struct foo { int a ; int b ; }\n",
        "bar int bar ( s#foo )\n", //
    );
    assert_ok!(corpus_b.load_buffer("b/test.symtypes", content_b.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus_a.compare_with(&corpus_b, &mut out, 1));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "The following '1' exports are different:\n",
            " bar\n",
            "\n",
            "because of a changed 's#foo':\n",
            "@@ -1,3 +1,4 @@\n",
            " struct foo {\n",
            " \tint a;\n",
            "+\tint b;\n",
            " }\n", //
        )
    );
}

#[test]
fn compare_changed_nested_type() {
    // Check that the comparison of two corpuses reports also changes in subtypes even if the parent
    // type itself is modified, as long as each subtype is referenced by the parent type in both
    // inputs.
    let mut corpus_a = SymCorpus::new();
    let content_a = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( int a , s#foo )\n", //
    );
    assert_ok!(corpus_a.load_buffer("a/test.symtypes", content_a.as_bytes()));

    let mut corpus_b = SymCorpus::new();
    let content_b = concat!(
        "s#foo struct foo { int a ; int b ; }\n",
        "bar int bar ( s#foo , int a )\n", //
    );
    assert_ok!(corpus_b.load_buffer("b/test.symtypes", content_b.as_bytes()));

    let mut out = Vec::new();
    assert_ok!(corpus_a.compare_with(&corpus_b, &mut out, 1));
    assert_eq!(
        String::from_utf8(out).unwrap(),
        concat!(
            "The following '1' exports are different:\n",
            " bar\n",
            "\n",
            "because of a changed 'bar':\n",
            "@@ -1,4 +1,4 @@\n",
            " int bar (\n",
            "-\tint a,\n",
            "-\ts#foo\n",
            "+\ts#foo,\n",
            "+\tint a\n",
            " )\n",
            "\n",
            "The following '1' exports are different:\n",
            " bar\n",
            "\n",
            "because of a changed 's#foo':\n",
            "@@ -1,3 +1,4 @@\n",
            " struct foo {\n",
            " \tint a;\n",
            "+\tint b;\n",
            " }\n", //
        )
    );
}
0707010000001B000081A400000000000000000000000167CF0953000040A1000000000000000000000000000000000000003C00000000suse-kabi-tools-0.2.0+git12.d8ab89d/src/sym/tests_format.rs// Copyright (C) 2024 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use super::*;
use crate::assert_ok;

#[test]
fn format_typedef() {
    // Check the pretty format of a typedef declaration.
    let tokens = vec![
        Token::new_atom("typedef"),
        Token::new_atom("unsigned"),
        Token::new_atom("long"),
        Token::new_atom("long"),
        Token::new_atom("u64"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "typedef unsigned long long u64", //
        )
    );
}

#[test]
fn format_enum() {
    // Check the pretty format of an enum declaration.
    let tokens = vec![
        Token::new_atom("enum"),
        Token::new_atom("test"),
        Token::new_atom("{"),
        Token::new_atom("VALUE1"),
        Token::new_atom(","),
        Token::new_atom("VALUE2"),
        Token::new_atom(","),
        Token::new_atom("VALUE3"),
        Token::new_atom("}"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "enum test {",
            "\tVALUE1,",
            "\tVALUE2,",
            "\tVALUE3",
            "}", //
        )
    );
}

#[test]
fn format_struct() {
    // Check the pretty format of a struct declaration.
    let tokens = vec![
        Token::new_atom("struct"),
        Token::new_atom("test"),
        Token::new_atom("{"),
        Token::new_atom("int"),
        Token::new_atom("ivalue"),
        Token::new_atom(";"),
        Token::new_atom("long"),
        Token::new_atom("lvalue"),
        Token::new_atom(";"),
        Token::new_atom("}"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "struct test {",
            "\tint ivalue;",
            "\tlong lvalue;",
            "}", //
        )
    );
}

#[test]
fn format_union() {
    // Check the pretty format of a union declaration.
    let tokens = vec![
        Token::new_atom("union"),
        Token::new_atom("test"),
        Token::new_atom("{"),
        Token::new_atom("int"),
        Token::new_atom("ivalue"),
        Token::new_atom(";"),
        Token::new_atom("long"),
        Token::new_atom("lvalue"),
        Token::new_atom(";"),
        Token::new_atom("}"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "union test {",
            "\tint ivalue;",
            "\tlong lvalue;",
            "}", //
        )
    );
}

#[test]
fn format_function() {
    // Check the pretty format of a function declaration.
    let tokens = vec![
        Token::new_atom("void"),
        Token::new_atom("test"),
        Token::new_atom("("),
        Token::new_atom("int"),
        Token::new_atom("ivalue"),
        Token::new_atom(","),
        Token::new_atom("long"),
        Token::new_atom("lvalue"),
        Token::new_atom(")"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "void test (",
            "\tint ivalue,",
            "\tlong lvalue",
            ")", //
        )
    );
}

#[test]
fn format_enum_constant() {
    // Check the pretty format of an enum constant declaration.
    let tokens = vec![Token::new_atom("7")];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "7", //
        )
    );
}

#[test]
fn format_nested() {
    // Check the pretty format of a nested declaration.
    let tokens = vec![
        Token::new_atom("union"),
        Token::new_atom("nested"),
        Token::new_atom("{"),
        Token::new_atom("struct"),
        Token::new_atom("{"),
        Token::new_atom("int"),
        Token::new_atom("ivalue1"),
        Token::new_atom(";"),
        Token::new_atom("int"),
        Token::new_atom("ivalue2"),
        Token::new_atom(";"),
        Token::new_atom("}"),
        Token::new_atom(";"),
        Token::new_atom("long"),
        Token::new_atom("lvalue"),
        Token::new_atom(";"),
        Token::new_atom("}"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "union nested {",
            "\tstruct {",
            "\t\tint ivalue1;",
            "\t\tint ivalue2;",
            "\t};",
            "\tlong lvalue;",
            "}", //
        )
    );
}

#[test]
fn format_imbalanced() {
    // Check the pretty format of a declaration with wrongly balanced brackets.
    let tokens = vec![
        Token::new_atom("struct"),
        Token::new_atom("imbalanced"),
        Token::new_atom("{"),
        Token::new_atom("{"),
        Token::new_atom("}"),
        Token::new_atom("}"),
        Token::new_atom("}"),
        Token::new_atom(";"),
        Token::new_atom("{"),
        Token::new_atom("{"),
    ];
    assert_eq!(
        pretty_format_type(&tokens),
        crate::string_vec!(
            "struct imbalanced {",
            "\t{",
            "\t}",
            "}",
            "};",
            "{",
            "\t{", //
        )
    );
}

#[test]
fn format_typeref() {
    // Check the pretty format of a declaration with a reference to another type.
    let mut tokens: Vec<Token> = ["struct", "typeref", "{"]
        .into_iter()
        .map(Token::new_atom)
        .collect();
    tokens.push(Token::new_typeref("s#other"));
    tokens.extend(["other", ";", "}"].into_iter().map(Token::new_atom));
    let pretty = pretty_format_type(&tokens);
    let expected = crate::string_vec!(
        "struct typeref {",
        "\ts#other other;",
        "}", //
    );
    assert_eq!(pretty, expected);
}

#[test]
fn format_removal() {
    // Check the diff format when a struct member is removed.
    let old_tokens: Vec<Token> = [
        "struct", "test", "{", "int", "ivalue1", ";", "int", "ivalue2", ";", "}",
    ]
    .into_iter()
    .map(Token::new_atom)
    .collect();
    let new_tokens: Vec<Token> = ["struct", "test", "{", "int", "ivalue1", ";", "}"]
        .into_iter()
        .map(Token::new_atom)
        .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,4 +1,3 @@\n",
        " struct test {\n",
        " \tint ivalue1;\n",
        "-\tint ivalue2;\n",
        " }\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_removal_top() {
    // Check the diff format when data is removed at the top.
    let old_tokens: Vec<Token> = ["ivalue1", "ivalue2", "ivalue3", "ivalue4", "ivalue5"]
        .into_iter()
        .flat_map(|name| ["int", name, ";"])
        .map(Token::new_atom)
        .collect();
    let new_tokens: Vec<Token> = ["ivalue2", "ivalue3", "ivalue4", "ivalue5"]
        .into_iter()
        .flat_map(|name| ["int", name, ";"])
        .map(Token::new_atom)
        .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,4 +1,3 @@\n",
        "-int ivalue1;\n",
        " int ivalue2;\n",
        " int ivalue3;\n",
        " int ivalue4;\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_removal_end() {
    // Check the diff format when data is removed at the end.
    let old_tokens: Vec<Token> = ["ivalue1", "ivalue2", "ivalue3", "ivalue4", "ivalue5"]
        .into_iter()
        .flat_map(|name| ["int", name, ";"])
        .map(Token::new_atom)
        .collect();
    let new_tokens: Vec<Token> = ["ivalue1", "ivalue2", "ivalue3", "ivalue4"]
        .into_iter()
        .flat_map(|name| ["int", name, ";"])
        .map(Token::new_atom)
        .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -2,4 +2,3 @@\n",
        " int ivalue2;\n",
        " int ivalue3;\n",
        " int ivalue4;\n",
        "-int ivalue5;\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_max_context() {
    // Check the diff format shows changes separated by up to 6 lines of context as one hunk.
    let old_tokens: Vec<Token> = [
        "ivalue1", "ivalue2", "ivalue3", "ivalue4", "ivalue5", "ivalue6", "ivalue7", "ivalue8",
    ]
    .into_iter()
    .flat_map(|name| ["int", name, ";"])
    .map(Token::new_atom)
    .collect();
    let new_tokens: Vec<Token> = [
        "ivalue2", "ivalue3", "ivalue4", "ivalue5", "ivalue6", "ivalue7",
    ]
    .into_iter()
    .flat_map(|name| ["int", name, ";"])
    .map(Token::new_atom)
    .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,8 +1,6 @@\n",
        "-int ivalue1;\n",
        " int ivalue2;\n",
        " int ivalue3;\n",
        " int ivalue4;\n",
        " int ivalue5;\n",
        " int ivalue6;\n",
        " int ivalue7;\n",
        "-int ivalue8;\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_max_context2() {
    // Check the diff format shows changes separated by more than 6 lines of context as two hunks.
    let old_tokens: Vec<Token> = [
        "ivalue1", "ivalue2", "ivalue3", "ivalue4", "ivalue5", "ivalue6", "ivalue7", "ivalue8",
        "ivalue9",
    ]
    .into_iter()
    .flat_map(|name| ["int", name, ";"])
    .map(Token::new_atom)
    .collect();
    let new_tokens: Vec<Token> = [
        "ivalue2", "ivalue3", "ivalue4", "ivalue5", "ivalue6", "ivalue7", "ivalue8",
    ]
    .into_iter()
    .flat_map(|name| ["int", name, ";"])
    .map(Token::new_atom)
    .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,4 +1,3 @@\n",
        "-int ivalue1;\n",
        " int ivalue2;\n",
        " int ivalue3;\n",
        " int ivalue4;\n",
        "@@ -6,4 +5,3 @@\n",
        " int ivalue6;\n",
        " int ivalue7;\n",
        " int ivalue8;\n",
        "-int ivalue9;\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_addition() {
    // Check the diff format when a struct member is added.
    let old_tokens: Vec<Token> = ["struct", "test", "{", "int", "ivalue1", ";", "}"]
        .into_iter()
        .map(Token::new_atom)
        .collect();
    let new_tokens: Vec<Token> = [
        "struct", "test", "{", "int", "ivalue1", ";", "int", "ivalue2", ";", "}",
    ]
    .into_iter()
    .map(Token::new_atom)
    .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,3 +1,4 @@\n",
        " struct test {\n",
        " \tint ivalue1;\n",
        "+\tint ivalue2;\n",
        " }\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}

#[test]
fn format_modification() {
    // Check the diff format when a struct member is modified.
    let old_tokens: Vec<Token> = ["struct", "test", "{", "int", "ivalue1", ";", "}"]
        .into_iter()
        .map(Token::new_atom)
        .collect();
    let new_tokens: Vec<Token> = ["struct", "test", "{", "int", "ivalue2", ";", "}"]
        .into_iter()
        .map(Token::new_atom)
        .collect();
    let mut out = Vec::new();
    let result = write_type_diff(&old_tokens, &new_tokens, &mut out);
    assert_ok!(result);
    let expected = concat!(
        "@@ -1,3 +1,3 @@\n",
        " struct test {\n",
        "-\tint ivalue1;\n",
        "+\tint ivalue2;\n",
        " }\n", //
    );
    assert_eq!(String::from_utf8(out).unwrap(), expected);
}
0707010000001C000041ED00000000000000000000000467CF095300000000000000000000000000000000000000000000002A00000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests0707010000001D000081A400000000000000000000000167CF095300000DA7000000000000000000000000000000000000003100000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/cli.rs// Copyright (C) 2025 SUSE LLC <petr.pavlu@suse.com>
// SPDX-License-Identifier: GPL-2.0-or-later

use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use std::process::{Command, ExitStatus};

/// Captured outcome of one invocation of the `ksymtypes` binary.
struct RunResult {
    // Exit status returned by the spawned process.
    status: ExitStatus,
    // Standard output of the process, decoded as UTF-8.
    stdout: String,
    // Standard error of the process, decoded as UTF-8.
    stderr: String,
}

/// Runs the cargo-built `ksymtypes` binary with the given arguments and
/// captures its exit status together with the UTF-8 decoded output streams.
///
/// Panics if the binary cannot be executed or an output stream is not valid
/// UTF-8.
fn ksymtypes_run<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I) -> RunResult {
    let mut command = Command::new(env!("CARGO_BIN_EXE_ksymtypes"));
    command.args(args);
    let output = command.output().expect("failed to execute ksymtypes");
    let decode = |bytes: Vec<u8>| String::from_utf8(bytes).unwrap();
    RunResult {
        status: output.status,
        stdout: decode(output.stdout),
        stderr: decode(output.stderr),
    }
}

#[test]
fn compare_cmd() {
    // Check that the compare command trivially works.
    let args = [
        "compare",
        "tests/compare_cmd/a.symtypes",
        "tests/compare_cmd/b.symtypes",
    ];
    let result = ksymtypes_run(args);
    assert!(result.status.success());
    let expected_stdout = concat!(
        "The following '1' exports are different:\n",
        " foo\n",
        "\n",
        "because of a changed 'foo':\n",
        "@@ -1,3 +1,3 @@\n",
        " void foo (\n",
        "-\tint a\n",
        "+\tlong a\n",
        " )\n", //
    );
    assert_eq!(result.stdout, expected_stdout);
    assert_eq!(result.stderr, "");
}

#[test]
fn compare_cmd_dash_dash() {
    // Check that operands of the compare command can be specified after '--'.
    let args = [
        "compare",
        "--",
        "tests/compare_cmd/a.symtypes",
        "tests/compare_cmd/b.symtypes",
    ];
    let result = ksymtypes_run(args);
    assert!(result.status.success());
    let expected_stdout = concat!(
        "The following '1' exports are different:\n",
        " foo\n",
        "\n",
        "because of a changed 'foo':\n",
        "@@ -1,3 +1,3 @@\n",
        " void foo (\n",
        "-\tint a\n",
        "+\tlong a\n",
        " )\n", //
    );
    assert_eq!(result.stdout, expected_stdout);
    assert_eq!(result.stderr, "");
}

#[test]
fn consolidate_cmd() {
    // Check that the consolidate command trivially works.
    let result = ksymtypes_run(["consolidate", "tests/consolidate_cmd"]);
    assert!(result.status.success());
    let expected_stdout = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n",
        "baz int baz ( s#foo )\n",
        "F#a.symtypes bar\n",
        "F#b.symtypes baz\n", //
    );
    assert_eq!(result.stdout, expected_stdout);
    assert_eq!(result.stderr, "");
}

#[test]
fn consolidate_cmd_output() {
    // Check that the consolidate command writes its result to the file specified by --output.
    let output_path =
        Path::new(env!("CARGO_TARGET_TMPDIR")).join("consolidate_cmd_output.symtypes");
    // Make sure a stale result from a previous run cannot satisfy the test.
    fs::remove_file(&output_path).ok();
    let result = ksymtypes_run([
        OsStr::new("consolidate"),
        OsStr::new("--output"),
        output_path.as_os_str(),
        OsStr::new("tests/consolidate_cmd"),
    ]);
    assert!(result.status.success());
    assert_eq!(result.stdout, "");
    assert_eq!(result.stderr, "");
    let output_data = fs::read_to_string(&output_path).expect("Unable to read the output file");
    let expected = concat!(
        "s#foo struct foo { int a ; }\n",
        "bar int bar ( s#foo )\n",
        "baz int baz ( s#foo )\n",
        "F#a.symtypes bar\n",
        "F#b.symtypes baz\n", //
    );
    assert_eq!(output_data, expected);
}
0707010000001E000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000003600000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/compare_cmd0707010000001F000081A400000000000000000000000167CF095300000017000000000000000000000000000000000000004100000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/compare_cmd/a.symtypesfoo void foo ( int a )
07070100000020000081A400000000000000000000000167CF095300000018000000000000000000000000000000000000004100000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/compare_cmd/b.symtypesfoo void foo ( long a )
07070100000021000041ED00000000000000000000000267CF095300000000000000000000000000000000000000000000003A00000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/consolidate_cmd07070100000022000081A400000000000000000000000167CF095300000033000000000000000000000000000000000000004500000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/consolidate_cmd/a.symtypess#foo struct foo { int a ; }
bar int bar ( s#foo )
07070100000023000081A400000000000000000000000167CF095300000033000000000000000000000000000000000000004500000000suse-kabi-tools-0.2.0+git12.d8ab89d/tests/consolidate_cmd/b.symtypess#foo struct foo { int a ; }
baz int baz ( s#foo )
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!285 blocks
openSUSE Build Service is sponsored by