#!/bin/bash
#
# Copyright (C) 2013 - 2022 Jolla Ltd.
# Copyright (C) 2019 - 2020 Open Mobile Platform LLC.
# Contact: http://jolla.com/
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in
#   the documentation and/or other materials provided with the
#   distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

# Enable extended glob patterns (e.g. '-*-debug@(info|source)' used for
# package selection by the 'deploy'/'undeploy' commands).
shopt -s extglob

# Print the one-line usage summary (two lines) on stdout.
# Reads the global $ME (program name) set elsewhere in this file.
synopsis() {
    printf 'usage: %s [global-opts] <command> [command-opts]\n' "$ME"
    printf '       %s --version\n' "$ME"
}

# Print the synopsis plus a pointer to --help.
# Globals:   ME (read) - program name
# Arguments: $1 - if non-empty, exit the script with status 1 after printing
# Returns:   0 when no argument is given
short_usage() {
    synopsis
    cat <<EOF

Try '$ME --help' for more information.
EOF
    # Exit with failure if any argument is given.  Use an explicit 'if'
    # rather than '[[ ... ]] && exit 1': the bare short-circuit form made
    # the function itself return status 1 whenever it was called without
    # arguments.  ${1:-} also keeps this safe under 'set -u'.
    if [[ -n "${1:-}" ]]; then
        exit 1
    fi
}

usage() {
    less --quit-if-one-screen <<EOF
$(synopsis)

mb2 provides convenient interface to Scratchbox 2, rpmbuild and more.

COMMANDS OVERVIEW
   Building software

   build-init  Initialize build directory
   build-requires  Update build time dependencies
   prepare  Prepare sources using the recipe from RPM SPEC file
   apply  Apply patches listed in the RPM SPEC file
   build  Execute all build steps at once
   qmake  Execute qmake build step
   cmake  Execute cmake build step
   make  Execute make build step
   make-install  Execute make-install build step
   package  Execute package creation build step
   build-shell  Execute custom build steps

   Testing software

   deploy  Deploy to a device
   undeploy  Undeploy from a device
   check  Perform quality checks

   Maintaining software

   scrape  Scrape modifications from sources and save them as patches

   IDE support

   compiledb  Generate compilation database


BASIC BUILDING

Start by choosing the build target.

   $ alias mb2='mb2 --target SailfishOS-4.2.0.21-aarch64'

Enter a directory and initialize it as a build directory. mb2 will create a
subdirectory named '.mb2' there, populated with build-specific data it
maintains between invocations.

   $ mkdir test && cd test
   $ mb2 build-init

Now you can open a build shell and cross-compile programs for the configured
target. mb2’s build shell allows you to run arbitrary build commands as easily
as you would do in case of native compilation. Try to cross-compile a Hello
World program.

   $ cat >hello.cpp <<END
   #include <iostream>
   int main() { std::cout << "Hello!" << std::endl; return 0; }
   END
   $ mb2 build-shell
   [SailfishOS-4.2.0.21-aarch64] $ g++ -o hello hello.cpp

Cross-compiled programs which do not have special requirements on the run-time
environment can be also run under the build shell as simply as you would do
with native programs.

   [SailfishOS-4.2.0.21-aarch64] $ ./hello
   Hello!

Alternatively, you can invoke commands under the build shell simply by
prepending them with 'mb2 build-shell'.

   $ mb2 build-shell g++ -o hello hello.cpp
   $ mb2 build-shell ./hello
   Hello!

Additional build-time dependencies can be made available under the build
environment by common means after passing the '--maintain' option to the
'build-shell' command. However, this explicit approach can be (and should be)
avoided by expressing build-time dependencies on source level.  Promote
collaboration by making as few assumptions as possible on the build
environment - treat it as something you can dispose of without losing any
value. With all build-time dependencies expressed on source level, you and
others may build your software any time later with no extra effort. You will
learn more in the following sections.


BUILDING SOFTWARE PACKAGES

Various software packaging formats exist that allow to distribute software in
their source or binary form or both, accompanied with metadata describing
their dependencies and more. The primary packaging format used by Sailfish OS
is RPM and mb2 comes with built-in support for working with packages in this
format.

Enter a package directory. If you don’t have an existing package available,
you can use the sample application as in the following example.

   $ git clone https://github.com/sailfishos/cppqml-sample.git
   $ cd cppqml-sample

The recipe for building RPM package(s) can be found under the 'rpm' directory.
The primary packaging specification format is an RPM SPEC file (file extension
'.spec'). This is the format recognized by 'rpmbuild', the standard tool for
building RPM packages, which is also used by mb2 internally.

In addition to that, a simplified specification format is available.  Designed
with an emphasis on easy tool-assisted manipulation, it is based on the
general purpose YAML data serialization language. Packages created the above
mentioned way initially contain packaging specification in this simplified
format, based on which an RPM SPEC file will be generated automatically.
Manual additions to the generated SPEC file are possible between each pair of
'>>' and '<<' comment lines.  These additions will be preserved when the SPEC
file is regenerated after the YAML-based file is updated.

Any dependencies of your package should be recorded as part of the package
specification. With this information available, both the run-time and
build-time environment can be set up automatically to suit the needs of your
package, with no need for manual adjustments.

You can perform an all-in-one build procedure using the 'build' command.

   $ mb2 build

After successful build the resulting RPM package(s) can be found under the
RPMS directory.

   $ ls ./RPMS
   cppqml-1.0-1.aarch64.rpm

With well-written SPEC files it is possible to invoke each build phase
separately. See the documentation of the following commands for details.

   $ mb2 qmake
   $ mb2 make
   $ mb2 make-install
   $ mb2 package


WORKING WITH MULTIPLE TARGETS

When switching build target, ensure that all build artifacts created for the
previous target are removed before you start building for the new build
target. This can be achieved reliably with help of the version control system,
like in this example with git.

Attention: The following command will remove all untracked content under your
git working directory.

   $ git clean -xfd

If you followed previous sections of this manual, you can continue with the
following steps.

   $ alias mb2='mb2 --target SailfishOS-4.2.0.21-i486'
   $ git clean -xfd
   $ mb2 build

You can avoid the need to clean and rebuild when switching build targets by
doing out-of-tree builds, more commonly known as shadow builds in Qt world.
See SHADOW BUILDS further in this manual.


WORKING ON MULTIPLE PACKAGES

When modifications to two (or more) packages are needed under a task, and
there is a build-time dependency between those, it is necessary to deploy the
modified, locally built package to the build environment.  This happens
automatically when shared output directory is in use and all build-time
dependencies are recorded in the RPM SPEC file.

Shared output directory is a simple but powerful concept, not limited to the
above mentioned use case. It can be used to address a whole range of problems
related to "super-project" handling. Integrated development environments call
these "solutions", "workspaces", "sessions" etc. The terminology varies. mb2
is not that explicit about super-project handling, but it has a notion of
"task", which may denote a super-project too.

It is advisable to utilize per-task shared output directories by default, as
in this example.

    $ alias mb2='mb2 --output-prefix ~/RPMS --task --target my-target'

Notice that the '--task' option is used but no task name is passed. In this
case it will be determined automatically, based on the current Git branch -
check the description of the '--task' option for more details.

With this configuration we can satisfy build-time dependencies between
packages conveniently. Consider 'packageB' with build-time dependency on
'packageA'.

    $ cd packageA
    $ git checkout -b bugXXXXX
    $ mb2 build
    $ cd ../packageB
    $ git checkout -b bugXXXXX
    $ mb2 build

Package 'packageB' was built against package 'packageA' built in the previous
step. All resulting binary packages may be found in the shared output
directory and they all can be also deployed to a device conveniently with the
'--all' option:

    $ ls -1 ~/RPMS/bugXXXXX/my-target/
    $ mb2 -d device deploy --sdk --all

We have learned how to deal with dependencies between packages. And we can
deal with dependencies between tasks (projects) too. If task 'bug12345'
depends on (or is derived from) task 'bug12300', it makes sense to chain the
changes as in

   $ cp -a --reflink=auto ~/RPMS/{bug12300,bug12345}

Similarly it is possible to supply any custom build-time dependencies by
simply copying them under a shared output directory.


CLEAN BUILDS

Build time requirements of one package may interfere with those of another
one. Packages modified and deployed under the build environment in scope of
one task may not be compatible with changes worked on in scope of other task.
As time goes the build environment accumulates various changes, leading to
generally non-reproducible build results.

Use the 'build-requires diff' command to see how the current build environment
differs from the clean build environment in terms of package installations,
removals and replacements.

Use the 'build-requires reset' command to ensure that nothing else than the
build-time dependencies currently recorded at RPM SPEC file level is pulled in
on top of the clean build environment.

Use a shared output directory to supply locally built build-time dependencies
without disturbance by (possibly implied) 'build-requires reset'. See WORKING
ON MULTIPLE PACKAGES above.

Detailed information follows.

As explained later in the MANAGING SDK BUILD TOOLS section, any changes done
under the build environment are persisted under build targets. In order to
avoid polluting build targets the abovementioned way, mb2 uses a working copy
of a build target to set up the build environment.  These working copies are
called snapshots and the '--snapshot' option controls how a snapshot is
chosen.

The original build target defines the clean state of the build environment.
The modified state is preserved across mb2 invocations unless the clean state
was updated meanwhile, in which case the next time mb2 is checking for
build-time dependencies it also resets the build environment to the updated
clean state, keeping it ahead of the clean state without any action required from
you. This has the same effect as issuing the 'build-requires reset' command,
which is also the way to reset to the (possibly updated) clean state
forcefully at any time. See also the '--no-pull-build-requires' option.

If a temporary snapshot is used (see the '--snapshot' option), it will be also
reset whenever it is taken into use with other build tree.

In most respects, build target snapshots are much like regular build targets.
Most of the subcommands of the standalone 'sdk-assistant' command may be used
equally on both. By modifying an original build target you redefine the clean
state. Depending on your use case, you may do better by cloning the original
build target and using the clone to persist the redefined clean state.

   $ sdk-assistant clone SailfishOS-4.2.0.21-{aarch64,MyDevice}
   $ sdk-assistant maintain SailfishOS-4.2.0.21-MyDevice
   ... add/rm repositories, packages etc. ...
   $ alias mb2='mb2 ... --target=SailfishOS-4.2.0.21-MyDevice'

When you are using live repositories and mb2 fails to install
build-time dependencies due to errors like "file not found on the
server", i.e., because the local repository cache got outdated, you
will most likely want to update the original target, not the snapshot.
Changes will be propagated to the snapshot automatically as described
above.


SHADOW BUILDS

CWD is used as a base dir for installroot/ and RPMS/ to allow for shadow
builds. Shadow builds can be done by passing project file or directory path to
the "build" or "qmake" (or "cmake") command:

    $ mkdir build && cd build
    $ mb2 build ../path/to/sources

    Or manually invoking each step:

    $ mkdir build && cd build
    $ mb2 qmake ../path/to/sources
    $ mb2 make
    $ mb2 package

Limitations related to shadow builds:

    1. Shadow build is only available for packages that use qmake (or CMake or
       GNU Automake).  Additionally, their build and install procedure must be
       fully realized using qmake (or CMake or GNU Automake) with no
       additional steps implemented at the SPEC file level.
    2. The "--prepare" option to the "build" command cannot be used with shadow
       builds.
    3. If "prepare" or "apply" is needed, it must be used directly on the
       source tree prior to starting a shadow build (shadow builds for
       multiple targets are still possible provided that the %prep section is
       target-independent).
    4. GNU Automake, Autoconf and related tools, in case of packages that
       invoke these at build time, still store their outputs (Makefile.in,
       configure, etc.) under the source tree.


DEPLOYING PLATFORM PACKAGES

With platform packages, subpackages specific to a particular device variant
often exist, so a careful selection of subpackages to install is needed.
Platform packages can be updated conveniently with the help of 'zypper dup',
which avoids the need for manual selection in many cases:

    mb2 -t target -d device deploy --manual --all \
        && mb2 -d device run sudo zypper -p RPMS dup --from ~plus-repo-1

A shorthand syntax exists for this approach - it is the '--zypper-dup'
deployment method:

    mb2 -t target -d device deploy --zypper-dup --all

Pass '--dry-run' to preview the effect before actually applying it.


QUALITY ASSURANCE

Certain quality checks may be performed with the help of the 'check' command:

    cd package
    mb2 build
    mb2 check

The command enables execution of (pluggable) test suites, each realizing
testing on one or more of the four recognized test levels:

    1. Static testing (verification) performed on the source tree

    2. Static testing (verification) performed on the build results (packages)

    3. Dynamic testing (validation) performed at build host (unit testing)

       Testing at this level is only supported with the built-in suite
       'rpmspec', that executes the steps defined by the '%check' section of
       the RPM SPEC file if present.

       Testing at this level is normally performed by the 'build' and
       'package' commands (unless used with the '--no-check' option) and so it
       is not enabled by default for this command. Performed with this
       command, it involves execution of the 'make-install' command.

    4. Dynamic testing (validation) performed on a target device (system testing)

       The 'check' command does not include the deployment step. Package(s)
       need to be deployed to the device selected with the '--device'
       global option beforehand, possibly with the 'deploy' command in
       case of packages associated with the build tree under CWD.

The available test suites may be listed with the '--list-suites' option to the
'check' command.

See the 'check' command to learn how to override the default selection of test
levels, suites and artefacts.


MAINTAINING CHANGELOGS

If a file exists with '.changes' extension, otherwise matching the
RPM SPEC file name, the effect will be the same as having a %changelog
section in the SPEC file.  If a file with '.changes.run' extension is
found instead, this file will be executed and its output treated as
the actual changelog. If both kind of files are found, '.changes.run'
takes precedence.


SIGNING PACKAGES

The 'build' and 'package' commands allow to optionally sign the resulting
packages. Use the '--sign' option to these commands to enable this step.

The cryptographic key to use for signing can be selected with the global
option '--package-signing-user'. If this is a passphrase-protected key, the
passphrase may be supplied with one of the '--package-signing-passphrase' or
'--package-signing-passphrase-file' options. Set up yourself a shell alias to
apply these by default:

    $ touch PASSFILE
    $ chmod 600 PASSFILE
    $ cat >PASSFILE
    YourPassphrase
    ^D
    $ alias mb2="mb2 --package-signing-user NAME \\
        --package-signing-passphrase-file PASSFILE"

The GPG version on host and under the SDK are not likely to match - for that
reason the SDK uses its own GPG home directory and therefore your secret key
must be imported under the SDK before it can be used. This can be achieved
with command like this (executed on host):

    $ gpg --export-secret-keys ID |sdk gpg2 --import

(Replace 'sdk' with the command you use to enter the SDK)


MANAGING SDK BUILD TOOLS

The Sailfish SDK abstracts the support for cross-development for particular
target HW architectures and Sailfish OS versions (forward compatibility
applies) in form of add-on SDK Build Tools.

Two types of SDK Build Tools are recognized:

1. SDK Build Target - a target-compatible Sailfish OS image, and

2. SDK Build Tooling - a host-compatible collection of programs used at build
   time.

The shorter terms "build tools", "(build) tooling(s)" and "(build) target(s)"
are used instead of the official terms where the context admits.

Pairs of build targets and toolings are combined using the Scratchbox 2
technology to form complete, virtual build environments, where developers get
the feeling of building software the native way while cross-compilation
actually happens under the hood.

Build toolings are treated as read-only by Scratchbox 2. Any changes done
under the build environment presented by Scratchbox 2 are persisted under the
build targets.

SDK Build Tools can be managed using the standalone 'sdk-assistant' command.


COMMANDS
    build-init [<project-dir-or-file>]
        Initialize build directory. This is normally done implicitly by the
        'build', 'qmake' or 'cmake' commands and does not need to be done
        explicitly with this command.

        When a <project-dir-or-file> is passed, a shadow build directory will
        be initialized.  Otherwise the current working directory is treated as
        the project directory.

        Initializing a build directory creates a hidden directory called
        '.mb2'. Removing this directory stops mb2 treating the containing
        directory as a build directory.

    build-requires [--[no-]refresh] {pull|reset|diff}
        When 'pull' is used, install or update the build-time dependencies as
        is done implicitly by the 'prepare', 'build' and 'qmake' (or 'cmake')
        commands but refresh the package cache first, unless '--no-refresh' is
        active.

        Unless the global '--no-snapshot' option is used, '--no-refresh' is
        implied.  This can be overridden with '--refresh' - do not use unless
        you know what you are doing. If refresh is needed, refresh (and
        update) the original target instead.

        Commands described below are not available when the '--no-snapshot'
        global option is used.

        The 'reset' command is an extended version of 'pull'. It ensures that
        nothing else than the current build-time dependencies is pulled in on
        top of the clean build environment. It does it by resetting the target
        snapshot to the (possibly updated) state of the original target first,
        pulling later.

        The 'diff' command tells how the current build environment differs from
        the clean build environment in terms of package installations, removals
        and replacements.

    prepare
        Run rpmbuild. Execute just the %prep section of the RPM SPEC file.

        When a supported version control system (VCS) is used to manage the
        source tree, any changes introduced by executing the %prep section
        will be applied with the help of the VCS in order to keep your own
        changes safe and clearly separated.

        Speaking the language of Git, the only supported VCS at the moment,
        new commits will be created - one for each applied patch and a final
        one for changes introduced other way than by applying patches.

        Changes applied this way are ignored by the '--fix-version' global
        option. If you have no other changes, it will not indicate any change
        since the last tagged version.

        Changes originating from patches may be updated by usual ways of Git
        usage, after which the 'scrape' command can be used to propagate
        the changes back to the patches.

        The above described behavior may be suppressed by the '--no-vcs-apply'
        global option.


    apply [-R]
        Apply all patches defined in the RPM SPEC file to the source tree.

        Changes introduced this way are not automatically recorded in a
        version control system as is normally done by the 'prepare' command.

        -R    reverse apply patches (patch -R)

    build [-p|--prepare] [--no-check] [-d|--enable-debug] [-j <n>] [-s|--sign] 
          [<project-dir-or-file>] [--] [<rpmbuild-extra-arg>...]
        Run rpmbuild. Execute all sections of the RPM SPEC file except for
        %prep unless told otherwise.

        If path to a <project-dir-or-file> is passed, shadow build will be
        done. Otherwise the current working directory is treated as the
        project directory. (The file name part is ignored - project file
        selection is done by qmake call inside the SPEC file.)

        --prepare         Run %prep section as well. This has the same effect
                          as using the 'prepare' command first
        --no-check        Skip the %check section and rpmlint
        --enable-debug    Enable debug build
        -j <n>            Run up to <n> jobs simultaneously
        --sign            Sign packages

    qmake [<project-dir-or-file>] [--] [<qmake-extra-arg>...]
        Run rpmbuild. Execute just the %build section of the RPM SPEC file,
        suppressing any 'make' invocation figuring in that section.

        If path to a <project-dir-or-file> is passed, shadow build will be
        done. Otherwise the current working directory is treated as the
        project directory. (The file name part is ignored - project file
        selection is done by qmake call inside the SPEC file.)

    cmake [<project-dir>] [--] [<cmake-extra-arg>...]
    cmake --build . [<cmake-extra-arg>...] [-- <build-tool-extra-arg>...]
        Run rpmbuild. Execute just the %build section of the RPM SPEC file,
        suppressing any 'cmake' invocation figuring in that section, that does
        not match the execution form used here, i.e., does not trigger the
        desired action, namely 1) generate a project buildsystem or
        2) build the project.

        Additionally, any plain 'make' invocation will be recognized as
        implementing the 'build the project' action and will be treated
        accordingly. Any possible <cmake-extra-arg> will be silently ignored
        and any possible <build-tool-extra-arg> will be used equally for
        each 'make' invocation in that case.

        If path to a <project-dir> is passed, shadow build will be done.
        Otherwise the current working directory is treated as the project
        directory.

    make [<make-extra-arg>...]
        Run rpmbuild. Execute just the %build section of the RPM SPEC file,
        suppressing any command figuring in that section, that is recognized
        as a command to generate project buildsystem. The following is a
        complete list of such commands:

        - any 'qmake' invocation
        - any 'cmake' invocation using the corresponding execution form

        For CMake based projects the 'make <make-extra-arg>...' command is
        equivalent to 'cmake --build . -- <make-extra-arg>...' (provided that
        'make' is the build tool used by the project).

    make-install
        Run rpmbuild. Execute just the %install section of the RPM SPEC file in
        order to populate the RPM "buildroot".

    package [--no-check] [-s|--sign]
        Run rpmbuild. Execute just the %install section of the RPM SPEC file,
        then build binary packages and optionally sign those. Unless
        '--no-check' is passed, execute also the %check section if present.

    build-shell [--maintain] [--] [<command> [<arg>...]]
        Execute an arbitrary <command> in the build environment.  If no
        <command> is specified, an interactive shell is opened.

        Pass the '--maintain' option when running maintenance commands - such
        that are used to inspect and/or modify the build environment
        installation.  Failure to do so leads to undefined results.

        Unless the '--no-snapshot' global option is used, any modifications to
        the build environment introduced this way can be reverted with the
        'build-requires reset' command, which also happens implicitly under
        certain conditions. Use any of the '[pre-|post-]pull-build-requires'
        hooks to persist your build environment modifications.  Alternatively,
        use the standalone 'sdk-assistant maintain' command for direct
        modification of the original build target.

        If all you are looking for is a way to supply locally available build
        time dependencies conveniently, see the '--search-output-dir' global
        option.

    compiledb [<make-extra-arg>...]
        Run rpmbuild. Execute just the %build section of the RPM SPEC file,
        modifying any 'make' invocation figuring in that section in order to
        generate Clang's JSON Compilation Database file
        'compile_commands.json' as described below.

        Unless the (top-level) makefile defines a target named 'compiledb' or
        'compile_commands.json', 'make' will be wrapped with the 'compiledb'
        tool as the default option to generate the compilation database.

        A custom mechanism to generate the compilation database may be
        supplied by defining one of the above mentioned targets. In that case
        'make' will be invoked with command line modified by adding that
        target name and removing any other target-like argument. (Any
        non-option, non-assignment argument will be removed.  Options that
        accept arguments need to be sticked together with their arguments with
        '='.)

    check --list-suites
    check [-l|--levels [+|-]<level>...] [-s|--suites [+|-]<suite>...]
          [--] [<artefact>...]
        Perform quality checks.

        By default, test suites operate on the artefacts associated with build
        tree under CWD. Alternatively, single path to a source tree and/or one
        or more package files may be specified as <artefact> on command line
        (build tree is always looked for under CWD).

        Each test suite realizes testing on one or more of the four recognized
        test levels. This is indicated in the output produced with the
        '--list-suites' option, using the keywords found in the first column
        of the following table.

        | Level          | Tested art. | Executed at   | Default |
        |----------------+-------------+---------------+---------|
        |    static      |             |               |         |
        | 1. ├── source  | Sources     | Build host    | +       |
        | 2. └── package | Packages    | Build host    | +       |
        |    dynamic     |             |               |         |
        | 3. ├── unit    | Build tree  | Build host    | -       |
        | 4. └── system  | Packages    | Target device | -       |

        (See also QUALITY ASSURANCE above.)

        -l, --levels [+|-]<level>...
            Select testing levels. Levels are selected using the keywords
            'source', 'package', 'static', 'unit', 'system' and 'dynamic' as
            described before. The default selection is indicated in the above
            table and can be overridden with the global option
            '--check-levels'.

        --list-suites
            List available test suites.

        -s, --suites [+|-]<suite>...
            Select test suites to use. By default all and only the essential
            suites are selected. See '--list-suites'. The default selection
            may be overridden with the global option '--check-suites'.

        The '--levels' and '--suites' options may be passed multiple values,
        separated with comma. If a name is prefixed with '+' or '-', the
        effect is cumulative and persists until overridden with a later name
        prefixed with the opposite operator.

    deploy {--pkcon|--rsync|--sdk|--zypper|--zypper-dup|--manual} [--all]
           [--debug] [-n|--dry-run] [--] [[+|-]<pattern>...]
        Deploy build results to a device.

        By default, just the build results of the project under current working
        directory will be deployed. The '--all' option can be used to require
        deployment of all packages found under the output directory, no matter
        of their origin. The '--all' option is not available when the '--rsync'
        method is used.

        With the '--dry-run' option, the list of (sub)packages that would be
        deployed may be previewed without applying the changes.  Depending on
        the selected deployment method, the packages may still get transferred
        to the device.

        Selection of (sub)packages for deployment can be influenced with glob
        <pattern>s, each of which may be preceded with '+' or '-' to opt for
        either inclusion (default) or exclusion respectively as the desired
        effect.  The effect of multiple patterns is cumulative, evaluated from
        left to right. The very first pattern determines whether it starts by
        removing from the full list or by adding to an empty list.  Extended,
        Bash-compatible patterns are allowed.  The '-*-debug@(info|source)'
        pattern is implied unless the '--debug' option is used.  Options must
        precede patterns if any.

        The following deployment methods are available, corresponding to the
        mandatory option:

        --pkcon
            Uses 'pkcon' to install RPM packages.  Not all device operating
            system versions and configurations permit use of this method.

        --rsync
            This method is special.  It can be used to transfer the RPM
            "buildroot" to the device as '/opt/sdk/<name>' without building
            and installing true RPM packages.  Use the 'make-install' command
            to populate the RPM buildroot.  Incompatible with '--all'.

        --sdk
            Uses the method native to the SDK to install RPM packages.
            Requires developer mode enabled on the device.

        --zypper
            Uses 'zypper' to install RPM packages.  Requires 'zypper'
            installed on the device.  Requires root access to the device.

        --zypper-dup
            Uses 'zypper dup' to install RPM packages.  Requires 'zypper'
            installed on the device.  Requires root access to the device.

            This method allows deploying updates to platform packages
            conveniently.  With platform packages, subpackages specific to a
            particular device variant often exist, so a careful selection of
            subpackages to install is needed.  Compared to the '--zypper'
            method, which uses 'zypper in' and so fully relies on the user to
            select packages to install, the '--zypper-dup' method is capable
            of figuring out the right subset of packages automatically in many
            cases.

        --manual
            Only transfers the RPM packages to the device for manual
            installation. Uses '~/RPMS' as the target directory on the device.

        When "Requires root access to the device" is stated, the device will
        be accessed as the 'root' user instead of the configured user. Other
        connection parameters remain unchanged.

    undeploy {--sdk|--pkcon|--rpm|--rsync|--zypper} [--all] [-n|--dry-run]
             [--] [[+|-]<pattern>...]
        Undeploy build results previously deployed to a device.

        Initially, device packages are collected based on the build host name
        recorded in their metadata.  Just those packages that seem to
        originate from the host where mb2 is running will be considered for
        removal from the device.  For that it is vital that the host name is
        stable and unique among the host names recorded by packages installed
        from other sources.

        Further, unless the '--all' option is used, just the build results of
        the project under current working directory will be considered for
        removal from the device.  When the '--all' option is used, any build
        results originated from this build host will be considered.  (Note how
        the effect of '--all' differs between 'deploy' and 'undeploy'.)

        Package origin is determined based on the build host name recorded in
        its metadata.  For that it is vital that 1) the build host name has
        not changed since the time the packages were built and that 2) the
        build host name is unique among the host names recorded by packages
        installed from other sources.  See the 'engine' command to learn more
        about the build host name.

        With the '--dry-run' option, the list of (sub)packages that would be
        removed may be previewed without applying the changes.

        If needed, the selection can be furthermore adjusted with glob
        <pattern>s, each of which may be preceded with '+' or '-' to opt for
        either inclusion (default) or exclusion respectively as the desired
        effect.  The effect of multiple patterns is cumulative, evaluated from
        left to right. The very first pattern determines whether it starts by
        removing from the full list or by adding to an empty list.
        Extended, Bash-compatible patterns are allowed.  Options must precede
        patterns if any.

        Except for the '--rsync' method, the selected method is not required to
        match the method previously used with the 'deploy' command.

        The following methods of undeployment are available, corresponding to
        the mandatory option:

        --pkcon
            Uses 'pkcon' to remove RPM packages.  Not all device operating
            system versions and configurations permit use of this method.

        --rpm
            Uses plain 'rpm' to remove RPM packages.  Requires root access
            to the device.

        --rsync
            This method is special.  It does not work with RPM packages.  It
            is only capable of removing files previously copied under
            '/opt/sdk/<name>' with 'deploy --rsync'.  (The origin of the files
            is not verified!)

        --sdk
            Uses the method native to the SDK to remove RPM packages.
            Requires developer mode enabled on the device. Not available with
            older device operating system versions.

        --zypper
            Uses 'zypper' to remove RPM packages.  Requires 'zypper'
            installed on the device. Requires root access to the device.

        When "Requires root access to the device" is stated, the device will
        be accessed as the 'root' user instead of the configured user. Other
        connection parameters remain unchanged.

    run [<command> [<arg>...]]
        Run the given command on the device specified with '--device'.  Can be
        used for running gdb and a gdb server.

    scrape [-n|--dry-run] [--keep] [-o|--output-dir <dir>] [--stable]
        Scrape modifications from sources and save them as patches.

        This command is specifically designed to support maintenance of
        packages where

        1) the sources are under version control by Git,
        2) another source repository is embedded as a Git submodule, and
        3) the embedded repository is not write-accessible.

        If a need to update the code under the embedded repository (submodule)
        arises in this case, the changes may be shared in form of patch files
        versioned under the embedding repository (superproject). This command
        makes this option less tedious, automating the job of collecting
        patches for new commits appearing in submodules (recursively),
        updating existing patch files in a safe manner and as a last step
        removing the corresponding commits from submodules.

        This command may be used in pair with the 'prepare' command, which is
        capable of doing the opposite job: applying patches as commits.

        This command enforces certain patch formatting and file naming
        conventions. When used in pair with the 'prepare' command, it is not
        guaranteed (not desired) that it produces patch files matching the
        original patch files. Among others the file numbering is enforced,
        using discrete number ranges for patches originating from different
        submodules.

        Use the '--stable' option when it is desired to preserve the original
        file names of patches previously applied with the 'prepare' command.

        Unless overridden with the '--output-dir' option, patches will be
        stored under the well known 'rpm' directory (where the RPM SPEC file
        lives).

        Use the '--dry-run' option to preview the list of commits considered
        for scraping - there are no safety limits on the number of commits to
        process.

        For every Git submodule, all commits reported by 'git submodule
        summary' executed under its superproject are considered. Out of these,
        commits which have the 'Mb2-scrape: discard' trailer line in their
        commit message, will be scraped without creating a patch.

        When the '--keep' option is used, commits will not be removed from
        submodules after patches are created from them.


GLOBAL OPTIONS
    The order of passing options matters. Options that occur later on the
    command line may override earlier options, including those implied by
    the earlier options.

    -i, --increment
        Increment release number in the RPM SPEC file

    -t, --target <name>
        Specify the sb2 target to use

    -d, --device <name>
        Specify the device

    -o, --output-dir <dir>
        Defaults to './RPMS'. The resulting RPM packages will be placed
        directly under the specified <dir>.  Implies '--search-output-dir'.

    -O, --output-prefix <dir>
        Much like '--output-dir' except that a subdirectory of the given <dir>
        will be used, denoted by the build target name (see '--target'). When
        the '--task' option is active, output directories will be further
        grouped by task name ('<dir>/[<task>/]<target>').

    --search-output-dir[={verbose|quiet}]
        Consider the RPM packages found in the output directory when installing
        build time dependencies. The optional value controls zypper verbosity.
        The default is 'verbose'.

    --no-search-output-dir
        Override '--search-output-dir' option

    -s, --specfile <file>
        Specify the RPM SPEC file

    -S, --snapshot[=<suffix>]
        Use a particular snapshot of the build target. The snapshot name is
        determined by combining the original target name with the given
        <suffix>, separated with dot.

        By default, i.e., when this option is not used, the snapshot with
        suffix '$DEF_SNAPSHOT' is used.

        When this option is used but no <suffix> argument is passed, '%pool'
        is assumed for compatibility with older mb2 versions. (Deprecated)

        When <suffix> is '%pool[,<N>]', a temporary snapshot will be used from
        a pool of the optionally given size <N> (default: 5).  The actual
        suffix will follow the pattern 'pool.XXX'.  Each temporary snapshot is
        used exclusively with single build tree at a time and it is reset
        automatically whenever it is taken into use with another build tree.

        See the CLEAN BUILDS section above for more information.

    --no-snapshot[=force]
        Use build targets directly. mb2 will refuse to directly use a target
        for which snapshots exist unless 'force' is passed as the optional
        argument.

    -f, --shared-folder <dir>
        The folder where QtCreator shares devices.xml and ssh keys. This option
        is useful when the deploy command is used outside of virtual machine.

    -T, --task[=<name>|=git:<regex>]
        Modifies behavior of '--output-prefix' option.
        The task name can be specified either directly as <name> or it can be
        derived from the current Git branch name, using the POSIX extended
        regular expression <regex> both as a condition to enable this option
        and as a means to select the substring of the branch name to be used as
        the task name.  Defaults to '$DEF_TASK'.
        The default expression is suitable to look for a leading bug reference
        in the branch name.  E.g. 'bug1234' in 'bug1234-quickfix'.

    --no-task
        Override --task option

    --wrap <command>:<wrapper>
        Instruct rpmbuild to use the given <wrapper> as a replacement for the
        <command>. <command> must be a valid file name, without directory part
        while <wrapper> must be specified as an absolute file path.

    -x, --fix-version[=<tag>]
        This option is implied when used inside a Git working tree (use -X to
        override). When used via Qt Creator, it is only implied if the version
        recorded in the RPM SPEC file is exactly "0".

        With this option the version recorded in the SPEC file will be
        ignored.  Instead, package version will be derived from the name of the
        latest tag in the current git branch or the selected tag <tag>.

        If the current HEAD, index or working tree differs from the tree
        denoted by the (latest or selected) tag, a suffix composed from the
        current branch name, time stamp and a commit SHA1 will be added to the
        package version. If git-state is not clean a git-stash will be created
        and its SHA1 will be used.

        It is recommended to store 'Version: 0' and 'Release: 1' in the SPEC
        file when this option is to be used; as a sign that the version
        recorded there is not relevant.

    -X, --no-fix-version
        Override --fix-version option.

    -n, --no-pull-build-requires
        Do not update build-time dependencies unless required explicitly with
        'build-requires {pull|reset}'.

    -c, --changelog[=<args>]
        Include changelog generated from Git history with 'sfdk-changelog'
        command, forwarding any <args>.  This option is not intended for
        regular use - create a script file named after the RPM SPEC file but 
        with '.changes.run' extension to instruct $ME to generate changelog
        with 'sfdk-changelog <args>' instead.

    --no-vcs-apply
        Do not record applied patches in the version control system, ignore
        safety checks. See the 'prepare' command for more information.

    --build-shell-args <args>
        Arguments to pass to sb2 when used to run build commands. Defaults to no
        arguments.

        <args> will be split into arguments as with shell word splitting. Use
        backslash to escape possible whitespace characters within arguments.

        The essential '-t <target>' option is passed to sb2 independently of
        this option and it is an error to include it in the <args>.

    --build-shell-maintenance-args <args>
        Arguments to pass to sb2 when used to run maintenance commands.
        Defaults to '-m sdk-install -R'.

        <args> will be split into arguments as with shell word splitting. Use
        backslash to escape possible whitespace characters within arguments.

        The essential '-t <target>' option is passed to sb2 independently of
        this option and it is an error to include it in the <args>.

    --check-levels [+|-]<level>...
        Override the default test levels the 'check' command operates on. See
        the '--levels' option to the 'check' command for more details.

    --check-suites [+|-]<suite>...
        Override the default test suites the 'check' command executes. See
        the '--suites' option to the 'check' command for more details.

    --package-signing-user <name>
        Select the GPG key to sign packages with. Accepts the same values
        as the '--local-user' option of the gpg command does. See GPG
        documentation for more details.

        Depending on your SDK variant, different semantics may apply.

    --package-signing-passphrase <string>
        In case a passphrase-protected signing key is selected for signing
        packages, use <string> as the passphrase.
        
    --package-signing-passphrase-file <file>
        In case a passphrase-protected signing key is selected for signing
        packages, read the passphrase from the <file>. Only the first line will
        be read.

    --package-timeline
        Preserve older packages in the output directory.

    --hooks-dir <dir>
        The directory where to look for hooks. See HOOKS below.


HOOKS
    $ME allows altering its behavior with hooks in form of executable files
    located under a directory specified with the '--hooks-dir' option.

    Hooks may receive their inputs through program arguments, environment
    variables and standard input stream as described below.

    The following hooks are recognized:

    pre-pull-build-requires [<dependency>...]
        This hook is invoked by commands that take care of updating build time
        dependencies – prior to doing so.

        The arguments and available environment variables are the same as with
        the 'pull-build-requires' hook.

    pull-build-requires [<dependency>...]
        This hook is invoked by commands that take care of updating build time
        dependencies. This hook fully overrides the default procedure of
        updating build time dependencies.

        Any <dependency> determined by querying the RPM SPEC file will be
        passed as an argument to this hook.

        The following environment variables provide additional information to
        this hook:

        SFDK_OUTPUT_DIR
            The effective output directory, possibly influenced by the
            '--output-dir' or '--output-prefix' global options.

        SFDK_OUTPUT_DIR_FILTERED
            Only set when the '--search-output-dir' global option is used.
            This points to a copy of the output directory with the RPM
            binaries built from the current project excluded.

        SFDK_TARGET
            The active build target. Unless the '--no-snapshot' global option
            is used, this is the snapshot target name and the original build
            target name is available as 'SFDK_TARGET_ORIGINAL'.

        SFDK_TARGET_ORIGINAL
            See 'SFDK_TARGET'.

        SFDK_SPEC_FILE
            The RPM SPEC file used for this build.

    post-pull-build-requires [<dependency>...]
        This hook is invoked by commands that take care of updating build time
        dependencies – after doing so.

        The arguments and available environment variables are the same as with
        the 'pull-build-requires' hook.

    post-package <package-file>...
        This hook is invoked whenever binary packages have been produced.

        Any <package-file> that has been produced is passed as an argument to
        this hook.

    pre-sign <package-file>...
        This hook is invoked whenever binary packages have been produced but
        prior to signing them.

        Any <package-file> that has been produced is passed as an argument to
        this hook.

    prepare-device <name>
        This hook is invoked prior to accessing a device.


KNOWN ISSUES

Build target snapshot is not reset automatically when the original target is
changed just by removing packages via plain RPM usage. Either use zypper for
all package operations or use 'mb2 build-requires reset' to ensure snapshot is
reset.
EOF
}

notice() {
    # Emit an informational message on stderr, prefixed for visibility.
    printf 'NOTICE: %s\n' "$*" >&2
}

fatal() {
    # Report an unrecoverable error on stderr and terminate the script.
    printf 'Fatal: %s\n' "$*" >&2
    exit 1
}

# readarray is a bash >= 4.0 feature
if ! type readarray &>/dev/null; then
    # Minimal polyfill covering only the 'readarray -t <array>' form used
    # by this script; the target must already be declared as an indexed
    # array. Any other invocation is rejected as an internal error.
    readarray() {
        if ! [[ $# -eq 2 && $1 == -t && $(declare -p "$2" 2>/dev/null) == "declare -a "* ]]; then
            fatal "Internal error: readarray: unimplemented case"
        fi

        local line=
        # Reset the target array, then append each stdin line to it. eval
        # is needed because the array name is only known at run time and
        # bash 3.x has no namerefs.
        eval $2=\(\)
        while IFS= read -r line; do
            eval $2=\(\${$2[@]:+\"\${$2[@]}\"} \"\$line\"\)
        done
    }
fi

# Usage: find_upwards <start> <name>
#
# Look for an entry called <name> in the <start> directory and each of its
# ancestors. On success print the directory where it was found and return 0,
# otherwise return 1.
find_upwards() {
    local start=$1
    local name=$2

    local ancestor=$start
    local parent=
    while [[ ! -e $ancestor/$name && $ancestor != / ]]; do
        parent=$(dirname "$ancestor")
        # Stop when dirname makes no more progress. Without this guard a
        # relative <start> loops forever: dirname eventually yields '.'
        # repeatedly and the '/' termination condition is never reached.
        [[ $parent != "$ancestor" ]] || break
        ancestor=$parent
    done

    if [[ -e $ancestor/$name ]]; then
        printf '%s\n' "$ancestor"
    else
        return 1
    fi
}

# Usage: set_insert <set> <item>
#
# Insert <item> into the array variable named by <set>, keeping the stored
# items sorted and unique. Items are assumed to be free of whitespace and
# glob characters (the indirect expansions are deliberately unquoted).
set_insert() {
    eval local set=(\${$1[*]})
    local item=$2

    # Join the existing items and the new one with newlines, deduplicate
    set=$(IFS=$'\n'; sort -u <<<"${set[*]}$IFS$item")

    eval $1=\(\$set\)
}

# Usage: set_remove <set> <item>
#
# Remove <item> from the array variable named by <set>. Whole items are
# matched literally (grep --line-regexp -F), not as patterns.
set_remove() {
    eval local set=(\${$1[*]})
    local item=$2

    set=$(IFS=$'\n'; grep --line-regexp -v -F "$item" <<<"${set[*]}")

    eval $1=\(\$set\)
}

# Usage: set_intersect <out> <set1> <set2>
#
# Store into the array variable named by <out> the items present in both
# array variables named by <set1> and <set2>. Implemented with join(1)
# over the newline-joined, sorted item lists.
set_intersect() {
    eval local set1=(\${$2[*]})
    eval local set2=(\${$3[*]})
    local out=()

    # join(1) requires its inputs to be sorted
    set1=$(IFS=$'\n'; sort <<<"${set1[*]}")
    set2=$(IFS=$'\n'; sort <<<"${set2[*]}")

    out=$(join <(cat <<<"$set1") <(cat <<<"$set2"))

    eval $1=\(\$out\)
}

# Usage: set_update <set> [+-]<item>...
#
# Update the set with the given items. If no item is prefixed with '+' or '-' it
# will simply replace the content of the set. The effect of '+' and '-' persists
# until overridden with later item prefixed with the opposite operator.
set_update() {
    eval local set=(\${$1[*]})
    local items=("${@:2}")

    local op= item=
    for item in "${items[@]}"; do
        # A leading '+' or '-' switches the operator; it stays in effect
        # for subsequent unprefixed items
        if [[ $item == [+-]* ]]; then
            op=${item:0:1}
            item=${item:1}
        fi
        case $op in
            # TODO with Bash 4 reimplement set_insert and set_remove so that they can be used here
            +) set=($(IFS=$'\n'; sort -u <<<"${set[*]}$IFS$item"));;
            -) set=($(IFS=$'\n'; grep --line-regexp -v -F "$item" <<<"${set[*]}"));;
            '')
                # No operator seen yet - the first plain item replaces the
                # whole set and later ones keep adding to it
                set=($item)
                op=+
                ;;
        esac
    done

    eval $1=\(\${set[*]}\)
}

# Usage: set_contains <set> <item>
#
# Succeed when the array variable named by <set> contains <item>.
set_contains() {
    eval local set=(\${$1[*]})
    local item=$2

    # Flatten the set with space delimiters and search for the item as a
    # delimited substring
    local flattened=" ${set[*]} "
    [[ $flattened == *" $item "* ]]
}

# Combine two recipes in form of comma separated list of "[+-]<item>" items
combine_set_update_recipes() {
    local base=$1
    local extra=$2

    case $extra in
        '')
            # Nothing to add - keep the base recipe
            printf '%s' "$base"
            ;;
        [+-]*)
            # Incremental recipe - append it, unless there is no base
            if [[ $base ]]; then
                printf '%s,%s' "$base" "$extra"
            else
                printf '%s' "$extra"
            fi
            ;;
        *)
            # Replacing recipe - it overrides the base entirely
            printf '%s' "$extra"
            ;;
    esac
}

# Run git with the given arguments. Executes in a subshell (note the
# parentheses body) so that the 'cd' and the exported identity variables
# do not leak into the caller. Honors the OPT_SRC_DIR global.
git_() (
    local args=("$@")

    # Cannot use the '-C' option - it requires newer Git.
    # Cannot use '--git-dir' + '--work-tree' options - they break at least git-stash.
    if [[ $OPT_SRC_DIR ]]; then
        cd "$OPT_SRC_DIR" || return
    fi

    # At least git-stash requires an explicitly set user identity. Setting environment
    # variables compared to passing options with '-c' works also for subprocesses invoked via
    # `git-submodule foreach`
    if inside_build_engine; then
        export GIT_AUTHOR_NAME=$FAKE_GIT_AUTHOR GIT_AUTHOR_EMAIL=$FAKE_GIT_EMAIL
        export GIT_COMMITTER_NAME=$FAKE_GIT_AUTHOR GIT_COMMITTER_EMAIL=$FAKE_GIT_EMAIL
    fi

    git "${args[@]}"
)

oomadvice() {
    # Thin wrapper around the SDK setup helper, forwarding all arguments
    # and its exit status unchanged.
    local helper=/usr/libexec/sailfish-sdk-setup/oomadvice
    "$helper" "$@"
}

make_gvariant_dict() {
    # Serialize the named shell variables into a GVariant-style dictionary
    # of strings: {"name":"value", ...}. Each argument is the NAME of a
    # variable whose value is looked up via indirect expansion.
    local key= joined=
    for key in "$@"; do
        joined+=${joined:+, }\"$key\":\"${!key}\"
    done

    printf '{%s}\n' "$joined"
}

make_gvariant_array() {
    # Serialize the given arguments into a GVariant-style array of
    # strings: ["item1", "item2", ...].
    local element= joined=
    for element in "$@"; do
        joined+=${joined:+, }\"$element\"
    done

    printf '[%s]\n' "$joined"
}

# Prepare the global locking machinery (see global_lock_acquire); must be
# called once before the lock is used. No-op during self tests.
global_lock_init() {
    [[ $MB2_SELF_TEST_RUN ]] && return

    # Lock files need to be available from sb2, hence the use of $HOME.

    # Two lock files are used in order to allow child processes to acquire the
    # lock on behalf of mb2. This is done by interchanging the main lock file.
    #
    # Special care needs to be taken when the two mechanisms need to be nested.
    # See how EXTENDED_PRE_INSTALL_LOCK_ARGS is overridden locally after calling
    # global_lock_acquire at some places.
    GLOBAL_LOCK=$HOME/.$ME.lock
    GLOBAL_LOCK_INTERCHANGE_LOCK=$HOME/.$ME.lock.lock

    # A child process may interchange the actual lock for this one, pre-acquired
    # by mb2 and released only when mb2 exits.
    #
    # TODO global cleanup
    trap '[[ $GLOBAL_LOCK_EXTENDED_LOCK ]] && rm -f "$GLOBAL_LOCK_EXTENDED_LOCK"' EXIT
    GLOBAL_LOCK_EXTENDED_LOCK=$(mktemp "$HOME/.$ME.lock.XXX") || return
    exec {GLOBAL_LOCK_EXTENDED_LOCK_FD}>"$GLOBAL_LOCK_EXTENDED_LOCK"
    flock "$GLOBAL_LOCK_EXTENDED_LOCK_FD"

    # Keep in sync with `rpm --showrc`
    local def_spec_install_pre_head='%{___build_pre}'
    local def_spec_install_pre_tail='[ "$RPM_BUILD_ROOT" != "/" ] && rm -rf "${RPM_BUILD_ROOT}"\
mkdir -p `dirname "$RPM_BUILD_ROOT"`\
mkdir "$RPM_BUILD_ROOT"\
%{nil}'

    # Let rpmbuild acquire the lock at the beginning of the %install section
    # (before the shared build root or a shared output directory is accessed)
    # by redefining the __spec_install_pre macro. ${var@Q} quotes the values
    # so they can be safely reused as shell input.
    EXTENDED_PRE_INSTALL_LOCK_ARGS=(--define "__spec_install_pre \
$def_spec_install_pre_head\\
flock ${GLOBAL_LOCK_INTERCHANGE_LOCK@Q} flock ${GLOBAL_LOCK@Q} \\\\\\
    ln -f ${GLOBAL_LOCK_EXTENDED_LOCK@Q} ${GLOBAL_LOCK@Q}\\
$def_spec_install_pre_tail")
}

# Take the global lock. The interchange lock is taken first so that the
# main lock file cannot be swapped while we open it (see global_lock_init
# for the interchange mechanism). No-op during self tests.
global_lock_acquire() {
    [[ $MB2_SELF_TEST_RUN ]] && return

    exec {GLOBAL_LOCK_INTERCHANGE_LOCK_FD}>"$GLOBAL_LOCK_INTERCHANGE_LOCK"
    flock "$GLOBAL_LOCK_INTERCHANGE_LOCK_FD"
    exec {GLOBAL_LOCK_FD}>"$GLOBAL_LOCK"
    flock "$GLOBAL_LOCK_FD"
}

# Release the locks taken by global_lock_acquire, in reverse order.
# No-op during self tests.
global_lock_release() {
    [[ $MB2_SELF_TEST_RUN ]] && return

    flock --unlock "$GLOBAL_LOCK_FD"
    flock --unlock "$GLOBAL_LOCK_INTERCHANGE_LOCK_FD"
}

has_hook() {
    # Succeed when a hook file named $1 exists under the configured hooks
    # directory. The executable permission is deliberately not checked -
    # it would not be always possible to get this right under build
    # engine.
    local name=$1

    [[ $OPT_HOOKS_DIR && -f $OPT_HOOKS_DIR/$name ]]
}

# Usage: run_hook <hook> "<export>..." [<arg>...]
#
# Run the named hook if it exists (see has_hook), passing the remaining
# <arg>s through. $2 is a whitespace-separated list of extra variable
# names to export to the hook, on top of the standard SFDK_* variables
# set below. Outside the build engine the hook is executed directly in a
# subshell; inside the build engine it is delegated over D-Bus to sfdk
# running on the host. Returns the hook's exit status.
run_hook() {
    local hook=$1
    local exports=($2)
    local args=("${@:3}")

    has_hook "$hook" || return 0

    # Standard inputs made available to every hook
    local SFDK_OUTPUT_DIR=$OPT_OUTPUTDIR
    local SFDK_SOURCE_DIR=${OPT_SRC_DIR:-$PWD}
    local SFDK_TARGET=$OPT_TARGET
    local SFDK_TARGET_ORIGINAL=$OPT_ORIGINAL_TARGET
    local SFDK_SPEC_FILE=$OPT_SPEC

    exports+=(SFDK_OUTPUT_DIR)
    exports+=(SFDK_SOURCE_DIR)
    exports+=(SFDK_TARGET)
    exports+=(SFDK_TARGET_ORIGINAL)
    exports+=(SFDK_SPEC_FILE)

    if ! inside_build_engine; then
        # Subshell so the exports do not leak into our own environment
        (
            export ${exports[*]}
            "$OPT_HOOKS_DIR/$hook" ${args:+"${args[@]}"}
        )
        return
    fi

    # Serialize environment and arguments as GVariant text for the D-Bus call
    local environment=$(make_gvariant_dict ${exports[*]})
    local args_=$(make_gvariant_array ${args:+"${args[@]}"})

    local reply=
    reply=$(gdbus call --timeout=30000 \
        --address="$(</run/sdk-setup/sfdk_bus_address)" \
        --dest="$SAILFISH_SDK_SFDK_DBUS_SERVICE" --object-path / \
        --method org.sailfishos.sfdk.Sdk.runHook \
        "$OPT_HOOKS_DIR/$hook" "$args_" "$PWD" "$environment") \
        || return

    # The reply is either '()' (success) or '(<exit-status>,)'
    if [[ $reply == "()" ]]; then
        return 0
    elif [[ $reply =~ ^\(([0-9]+),\)$ ]]; then
        return ${BASH_REMATCH[1]}
    else
        fatal "Internal error: Unexpected reply from a hook: \"$reply\""
    fi
}

# Print the host name recorded in the first <host> element of the shared
# devices.xml. Relies on read_dom (defined elsewhere) to parse one XML
# token per call, setting TAG_NAME and ATTRIBUTES.
get_hostname_from_devices_xml() {
    local TAG_NAME= ATTRIBUTES=
    while read_dom; do
        if [[ $TAG_NAME == host ]]; then
            # NOTE(review): eval trusts the attribute text; devices.xml is
            # SDK-managed, presumably not attacker controlled - confirm.
            eval "local $ATTRIBUTES"
            printf '%s\n' "$name"
            break
        fi
    # Quote the redirection target - an unquoted command substitution
    # yields "ambiguous redirect" if the shared dir path contains spaces
    done < "$(get_shared_dir)/devices.xml"
}

# Determine the name identifying this build host. Inside the build engine
# it comes from the shared devices.xml; otherwise the FQDN (or the plain
# host name) is used, with 'localhost[.localdomain]' filtered out. Falls
# back to 'localhost.localdomain' when nothing better is available.
get_hostname() {
    local hostname=
    if inside_build_engine; then
        hostname=$(get_hostname_from_devices_xml)
    else
        # 'd' drops localhost-ish lines, 'q' stops at the first kept line,
        # so the FQDN is preferred when it is usable
        hostname=$({ hostname --fqdn; hostname; } \
            |sed '/^localhost\(\.localdomain\)\?$/d; q')
    fi

    printf '%s\n' "${hostname:-localhost.localdomain}"
}

try_to_make_spec() {
    # Generate a missing RPM SPEC file ($1) from its sibling YAML file
    # with 'specify', if the YAML exists.
    local spec=$1
    [[ -f $spec ]] && return # Nothing to do - the SPEC already exists
    local yaml=${spec%.spec}.yaml
    [[ -f $yaml ]] || return # No YAML to generate from
    ANSI_COLORS_DISABLED=1 specify -n -N "$yaml" \
        || fatal "Failed to convert YAML to RPM SPEC file"
}

try_to_make_spec_from_yaml() {
    # Convenience wrapper: derive the SPEC path from a YAML path ($1) and
    # try to generate the SPEC if it is missing.
    local yaml=$1
    try_to_make_spec "${yaml%.yaml}.spec"
}

sanitize_changelog() {
    # Filter stdin to stdout, doubling '%' characters so changelog text
    # cannot be misread as RPM macros when embedded in the SPEC file.
    sed -e 's/%/%%/g'
}

# Emit a '%changelog' section on stdout, taking the entries from the first
# available source: the '--changelog' option (generate from Git history),
# a '<spec>.changes.run' generator script, or a plain '<spec>.changes'
# file. Emits nothing when none of these applies.
try_to_make_changelog() {
    local changes_file=${OPT_SPEC%.spec}.changes
    local changes_run_file=${OPT_SPEC%.spec}.changes.run

    if [[ $OPT_CHANGELOG ]]; then
        notice "Auto generating changelog from Git history…"
        echo "%changelog"
        sfdk-changelog $OPT_CHANGELOG_ARGS |sanitize_changelog
    elif [[ -f $changes_run_file ]]; then
        notice "Generating changelog entries with '$changes_run_file'…"
        local runner=
        # In build VM the shared filesystem is noexec
        if ! [[ -x $changes_run_file ]]; then
            # Fall back to invoking the interpreter from the shebang line
            # explicitly ('cat -v' guards against non-printable bytes)
            if [[ $(head -c 3 <$changes_run_file |cat -v) == '#!/' ]]; then
                runner=$(sed -n '1s/^#!//p' $changes_run_file)
            else
                fatal "File is not executable: '$changes_run_file'"
            fi
        fi
        echo "%changelog"
        $runner $changes_run_file |sanitize_changelog
        # PIPESTATUS here still refers to the pipeline above
        [[ ${PIPESTATUS[0]} -eq 0 ]] || fatal "Failed to generate changelog entries"
    elif [[ -f $changes_file ]]; then
        notice "Appending changelog entries to the RPM SPEC file…"
        echo "%changelog"
        cat $changes_file |sanitize_changelog
    fi
}

# Regenerate the RPM SPEC file with 'specify' when the sibling YAML file
# exists and is more recent than the SPEC.
ensure_spec_newer_than_yaml() {
    local yaml="${OPT_SPEC%.spec}.yaml"
    if [[ -f "$yaml" ]] && [[ "$yaml" -nt "$OPT_SPEC" ]]; then # -nt is newer than
        ANSI_COLORS_DISABLED=1 specify -n -N "$yaml" \
            || fatal "Failed to convert YAML to RPM SPEC file"
    fi

    # it's ok also not to have a yaml
}

# Warn when the RPM SPEC file or its sibling YAML file uses CRLF (Windows)
# line endings, as detected by file(1).
warn_if_crlf_is_used() {
    local crlf_used=

    local file=
    # Assume that these two are sufficient indicators
    for file in "${OPT_SPEC%.spec}"{.spec,.yaml}; do
        # file(1) mentions "CRLF line terminators" for such files
        if [[ -f $file && $(file "$file") == *"CRLF line terminators"* ]]; then
            crlf_used=1
            break
        fi
    done

    if [[ $crlf_used ]]; then
        notice "Files with CRLF line endings found." \
            "Consult the Sailfish SDK FAQ to learn why to avoid that and how."
    fi
}

match_multi() {
    # Decide whether <name> ($1) is selected by the glob patterns ($2...).
    # Each pattern may carry a '+' (include, the default) or '-' (exclude)
    # prefix; later matching patterns override earlier ones. With no
    # patterns at all, everything matches. If the very first pattern is an
    # include, the selection starts empty; if it is an exclude, it starts
    # with everything included.
    local name=$1
    shift

    if (( $# == 0 )); then
        return 0
    fi

    local start_empty=
    [[ $1 != -* ]] && start_empty=1

    local selected= deselected= glob=
    for glob in "$@"; do
        local negate=
        case $glob in
            +*) glob=${glob#+};;
            -*) glob=${glob#-}; negate=1;;
        esac

        if [[ ! $glob ]]; then
            notice "Got invalid, empty pattern"
            return 1
        fi

        if [[ $name == $glob ]]; then
            if [[ $negate ]]; then
                selected=
                deselected=1
            else
                selected=1
                deselected=
            fi
        fi
    done

    [[ (! $start_empty || $selected) && ! $deselected ]]
}

# Print, one per line, the paths of the RPM files under <dir> ($1) that
# carry the highest epoch-version-release for their package name, filtered
# by the optional glob <patterns> ($2...; see match_multi). The package
# list is obtained by pointing zypper (run inside the build target via
# sb2_maintain) at <dir> as an ad-hoc repository.
find_latest_rpms() {
    local dir=$1 patterns=("${@:2}")

    local latest=$(
        sb2_maintain \
            zypper --plus-repo "$dir/" search --repo ~plus-repo-1 --details \
            `# drop header` \
            |sed '1,/^--/d' \
            `# name|epoch|version|release|arch` \
            |awk -F ' *\\| *' -v OFS='|' '{
                    evr = gensub(/((.+):)?([^-]+)(-(.*))?/, "\\2|\\3|\\5", 1, $4);
                    print $2, evr, $5
                }' \
            `# sort by name and e-v-r` \
            |sort -t '|' -k 1,1 -k2,2rV -k3,3rV -k4,4rV \
            `# keep first for each name` \
            |sort -t '|' --stable -k 1,1 -u \
            `# $name $dir/$name-$evr.$arch.rpm` \
            |awk -F '|' -v dir="$dir" '{
                        evr=$3 "-" $4;
                        if ($2)
                            evr=$2 ":" evr;
                        print $1, dir "/" $1 "-" evr "." $5 ".rpm"
                    }'
    )

    [[ $latest ]] || return

    # Each line of $latest is "<name> <path>"
    local name= pkg=
    while read name pkg; do
        if [[ ! -f "$pkg" ]]; then
            fatal "Internal error: Package does not exist: '$pkg'"
        fi
        if match_multi "$name" "${patterns[@]}"; then
            printf '%s\n' "$pkg"
        fi
    done <<<"$latest"
}

# Find binary rpms under <dir> built from the given <spec>, filtering according
# to <patterns>. If '--all' is used (must be the very first option), ignore
# <spec> and find latest packages no matter of their origin.
find_rpms()
{
    local all=
    if [[ $1 == --all ]]; then
        all=1
        shift
    fi

    local dir=$1 src_dir=$2 spec=$3 patterns=("${@:4}")

    if [[ $all ]]; then
        find_latest_rpms "$dir" "${patterns[@]}"
        return
    fi

    # Name and epoch-version-release of the source package the SPEC builds
    local wanted_source_name= wanted_evr=
    read wanted_source_name wanted_evr < <(rpmspec -q --srpm \
        --queryformat '%{NAME} %{EVR}\n' \
        --define "_sourcedir $src_dir" \
        "$spec")

    local pkg=
    while read -r pkg; do
        local name= source_rpm= evr=
        read name source_rpm evr < <(rpm -q --queryformat '%{NAME} %{SOURCERPM} %{EVR}\n' -p "$pkg")
        # Derive the source package name by stripping '-<evr>.src.rpm'
        local source_name=${source_rpm%-$evr.src.rpm}
        if [[ $source_name == $wanted_source_name && $evr == $wanted_evr ]] \
                && match_multi "$name" "${patterns[@]}"; then
            printf '%s\n' "$pkg"
        fi
    done < <(find "$dir" -maxdepth 1 -name '*.rpm')
}

# Remove from <dir> ($1) every binary rpm built from the source package
# defined by <spec> ($3, with sources under <src_dir> $2) whose EVR differs
# from that of the most recently modified matching package file.
prune_older_rpms()
{
    local dir=$1 src_dir=$2 spec=$3

    local prune_source_name=$(rpmspec -q --srpm \
        --queryformat '%{NAME}' \
        --define "_sourcedir $src_dir" \
        "$spec")
    if [[ ! $prune_source_name ]]; then
        notice "Cannot prune older packages: Failed to determine source package name"
        return 1
    fi

    # Find latest EVR from the latest package file
    local pkg= latest_evr=
    while read -r pkg; do
        local source_rpm= evr=
        read source_rpm evr < <(rpm -q --queryformat '%{SOURCERPM} %{EVR}\n' -p "$pkg")
        local source_name=${source_rpm%-$evr.src.rpm}
        if [[ $source_name == $prune_source_name ]]; then
            latest_evr=$evr
            break
        fi
    # Packages listed newest-first by modification time
    done < <(find "$dir" -maxdepth 1 -name '*.rpm' -printf '%T@ %p\n' \
        |sort --reverse --numeric-sort -k 1,1 \
        |cut --complement -d ' ' -f 1)

    if [[ ! $latest_evr ]]; then
        notice "Cannot prune older packages: No package matching the source package name found"
        return 1
    fi

    # Prune matching packages with older (other) EVR
    while read -r pkg; do
        local source_rpm= evr=
        read source_rpm evr < <(rpm -q --queryformat '%{SOURCERPM} %{EVR}\n' -p "$pkg")
        local source_name=${source_rpm%-$evr.src.rpm}
        if [[ $source_name == $prune_source_name && $evr != $latest_evr ]]; then
            # Align messages with those of rpmbuild
            rm -fv "$pkg" |sed "s/^removed '\(.*\)'$/Removed: \1/"
        fi
    done < <(find "$dir" -maxdepth 1 -name '*.rpm')
}

# Copy a repo excluding packages built from the given exclude_source_name package
filter_repo()
{
    local exclude_source_name=$1 repo=$2 output=$3

    # Start with a clean output directory
    rm -rf "$output"
    mkdir -p "$output"

    local pkg=
    while read -r pkg; do
        local source_rpm= evr=
        # read -r so that backslashes in the query output are taken literally
        read -r source_rpm evr < <(rpm -q --queryformat '%{SOURCERPM} %{EVR}\n' -p "$pkg")
        # Quote $evr so it is not treated as a glob pattern when stripping
        local source_name=${source_rpm%-"$evr".src.rpm}
        # Quote the RHS to compare literally, not as a glob pattern
        [[ $source_name != "$exclude_source_name" ]] || continue
        ln -s --target-directory="$output" "$pkg"
    done < <(find "$repo" -maxdepth 1 -name '*.rpm')

    createrepo_c "$output"
}

# Install (and optionally refresh/reset) the build time dependencies of <spec>
# under the build target. No-op unless --pull-build-requires is active.
maybe_verify_target_dependencies() {
    [[ $OPT_PULL_BUILD_REQUIRES ]] || return 0

    local spec=$1
    shift

    local opt_refresh= opt_reset=
    while [[ $# -gt 0 ]]; do
        case $1 in
            --refresh)
                opt_refresh=1
                ;;
            --reset=*)
                opt_reset=${1#*=}
                ;;
            *)
                # Fix: the call-stack array is FUNCNAME - BASH_FUNCTION does
                # not exist, so this message used to print an empty name.
                fatal "Internal error: Unexpected argument to ${FUNCNAME[0]}: '$1'"
                ;;
        esac
        shift
    done

    # Translate --with/--without build options into the corresponding rpm
    # --define arguments understood by rpmspec
    local extra_args=()
    set -- "${BUILD_EXTRA_ARGS[@]}"
    while [[ $# -gt 0 ]]; do
        case $1 in
            --with)
                extra_args+=(--define "_with_$2 1")
                shift
                ;;
            --without)
                extra_args+=(--define "_without_$2 1")
                shift
                ;;
            *)
                ;;
        esac
        shift
    done

    maybe_verify_target_dependencies_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        if [[ ! $OPT_NO_SNAPSHOT ]]; then
            sb2_maintain bash -c "sed -i -e '/^keeppackages=1 #mb2/d' /etc/zypp/repos.d/*.repo"
        fi
        global_lock_release
    )
    # Ensure cleanup runs on any way out of this function
    trap 'maybe_verify_target_dependencies_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    global_lock_acquire

    if [[ $opt_reset ]]; then
        if ! sdk-manage target snapshot --reset="$opt_reset" "$OPT_ORIGINAL_TARGET" "$OPT_TARGET"; then
            fatal "Failed to reset build target snapshot '$OPT_TARGET' to '$OPT_ORIGINAL_TARGET'"
        fi
    fi

    if [[ $opt_refresh ]]; then
        # Refresh repos before installing dependencies
        sb2_maintain zypper --non-interactive ref
    fi

    local deps=()
    readarray -t deps < <(sb2_build rpmspec --query --buildrequires "${extra_args[@]}" \
        --define "_sourcedir $OPT_PKGDIR" \
        "$spec" |sed 's/\s*$//')

    if inside_build_engine; then
        local session_start=$(date +%s)
    fi

    local args=(--non-interactive)

    local hook_exports=()

    if [[ $OPT_SEARCH_OUTPUTDIR ]]; then
        local filtered_outputdir=$STATEDIR/filtered-output-dir
        local exclude_source_name=$(get_spec_tag "$spec" "%{name}")
        filter_repo "$exclude_source_name" "$OPT_OUTPUTDIR" "$filtered_outputdir"
        args+=(--"$OPT_SEARCH_OUTPUTDIR_VERBOSITY" \
            --plus-repo "$filtered_outputdir" --no-gpg-checks)

        local SFDK_OUTPUT_DIR_FILTERED=$filtered_outputdir
        hook_exports+=(SFDK_OUTPUT_DIR_FILTERED)
    fi

    if [[ ! $OPT_NO_SNAPSHOT ]]; then
        # Enable package caching for build time dependencies under snapshots
        # to avoid repeated downloading after snapshot is reset.
        # Note that snapshots are configured to use original target's package
        # cache directory.
        sb2_maintain bash -c "sed -i -e '\$a keeppackages=1 #mb2' /etc/zypp/repos.d/*.repo"
    fi

    run_hook pre-pull-build-requires "${hook_exports[*]}" "${deps[@]}" \
        || fatal "The 'pre-pull-build-requires' hook failed"

    if has_hook pull-build-requires; then
        run_hook pull-build-requires "${hook_exports[*]}" "${deps[@]}" \
            || fatal "The 'pull-build-requires' hook failed"
    else
        # A non-empty first element implies a non-empty dependency list here
        if [[ $deps ]]; then
            sb2_maintain zypper "${args[@]}" in "${deps[@]}"
        fi

        if [[ $OPT_SEARCH_OUTPUTDIR ]]; then
            # Install first, then dup to ensure we really use packages from
            # the output directory as the repos may contain newer packages and
            # those would be pulled in preferably.  This happens not only when
            # working with older sources but also when a package hardcodes
            # version in its spec and the --fix-version option is not active.
            sb2_maintain zypper "${args[@]}" dup --from '~plus-repo-1'
        fi
    fi

    run_hook post-pull-build-requires "${hook_exports[*]}" "${deps[@]}" \
        || fatal "The 'post-pull-build-requires' hook failed"

    if [[ ! $OPT_NO_SNAPSHOT ]]; then
        sdk-manage target package-cache-prune "$OPT_TARGET"
    fi

    if inside_build_engine; then
        sdk-manage target sync "$OPT_TARGET" "$session_start"
    fi
}

# Expand <query> (an rpm query format string) against the SRPM headers of
# <spec>, with _sourcedir pointing at the package directory.
get_spec_tag() {
    local spec_path=$1 format=$2
    rpmspec --query --srpm --queryformat="$format" \
        --define "_sourcedir $OPT_PKGDIR" \
        "$spec_path"
}

# Do a simple, one-level expansion of %include directives. Failure to process
# an %include directive is not considered fatal; such a directive is copied
# unchanged to the output.
flatten_spec() {
    # Stream <spec_file> to stdout, replacing each resolvable "%include <path>"
    # line with the contents of the referenced file.
    local spec_file=$1

    local line=
    while IFS= read -r line; do
        case $line in
            "%include "*)
                local include_path=${line#%include }
                include_path=${include_path// /}  # trim any spaces
                # Skip unexpanded rpm macros and missing files - emit verbatim
                if [[ $include_path != *%* && -f "$include_path" ]]; then
                    cat "$include_path"
                    continue
                fi
                ;;
        esac
        printf '%s\n' "$line"
    done <"$spec_file"
}

has_windows_linefeeds() {
    # Succeed when <file> contains at least one carriage-return character
    local target=$1
    grep -qF -e $'\r' -- "$target"
}

drop_windows_linefeeds() {
    # Strip carriage returns from line ends of <file> in place
    local target=$1
    sed -i 's/\r$//' "$target"
}

# Omit some information usually printed by git-commit in the summary.
#
# Intentionally uses git directly (does not use git_). It is only meant to be
# used from %prep and that is not allowed to run on shadow build side.
git_commit_brief() {
    # Commit quietly, then print a one-line summary with short stats instead
    # of the usual verbose git-commit output.
    git commit --quiet "$@" || return
    git log -1 --pretty=format:"[%h] %s" --shortstat
    return 0
}

create_patchset_id() {
    # Identify the current state of the Git work tree: HEAD when clean, a
    # stash commit when dirty, NULL_ID when no state could be captured.
    local patchset_id=
    if [[ $(git_ status --porcelain --untracked-files=no "$OPT_PKGDIR") ]]; then
        patchset_id=$(git_ update-index --refresh >/dev/null; git_ stash create) \
            || notice "Cannot save state of Git working tree: git-stash failed."
    else
        patchset_id=$(git_ rev-parse HEAD)
    fi
    : ${patchset_id:=$NULL_ID}

    printf '%s\n' "$patchset_id"
}

# List patchset-id notes attached to recent commits, in the superproject and
# in all submodules. With '--prune' first drop notes referring to commits
# that no longer exist.
recent_patches() {
    local prune=$([[ $1 == --prune ]] && echo 1)

    if [[ $prune ]]; then
        git_ notes --ref "$PATCHSET_ID_REF" prune
        git_ submodule foreach --quiet --recursive "git notes --ref '$PATCHSET_ID_REF' prune"
    fi

    # Limit how far back in history to look
    local reasonable_limit=20
    local args=(--notes="$PATCHSET_ID_REF" --format=%N --max-count="$reasonable_limit")
    {
        # git log warns if notes ref does not exist - suppress stderr
        git_ log "${args[@]}" 2>/dev/null
        # ${args[*]@Q} expands to the shell-quoted words so the arguments
        # survive re-evaluation inside the foreach command string
        git_ submodule foreach --quiet --recursive "git log ${args[*]@Q} 2>/dev/null"
    } |sed '/^$/d'
}

skip_patches() {
    # Walk the ancestry of <head> past commits whose patchset-id note equals
    # <patchset_id> and print the first commit without such a note.
    local rev=$1
    local wanted_id=$2

    while true; do
        local note=$(git_ notes --ref "$PATCHSET_ID_REF" show "$rev" 2>/dev/null)
        [[ $note == "$wanted_id" ]] || break
        rev=$(git_ rev-parse "$rev"^)
    done

    printf '%s\n' "$rev"
}

# Derive the package version from Git - the nearest tag, extended with
# branch/timestamp/sha1 (and modified-submodule info) when the work tree
# differs from the tag - and write it into the Version: tag of <spec_file>.
maybe_fix_package_version() {
    local spec_file=$1

    local version_in_spec=$(sed -n 's/^Version:[[:space:]]*//p' "$spec_file")

    if [[ $OPT_NO_FIX_VERSION ]]; then
        return
    elif [[ ! $OPT_FIX_VERSION ]]; then
        # When used behind Qt Creator, fixing version is enabled implicitly only if the
        # version in spec file is exactly "0" - this is because there is no UI in Qt Creator
        # that would allow to override this.
        if behind_qtcreator && [[ $version_in_spec != 0 ]]; then
            return
        fi
    fi

    if ! inside_git_worktree; then
        if [[ $OPT_FIX_VERSION || $version_in_spec == 0 ]]; then
            notice "Not inside a Git working tree or no revision found, will not fix package version."
        fi
        return
    fi

    # Base the version on the explicitly hinted tag or the nearest tag
    local tag
    if [[ $OPT_FIX_VERSION_HINT ]]; then
        tag=$OPT_FIX_VERSION_HINT
    else
        if ! tag=$(git_ describe --tags --abbrev=0 2>/dev/null); then
            notice "No tags describe the HEAD, will not fix package version."
            return
        fi
    fi

    local version=$tag
    version=${version#*/}   # allow tags to have a prefix to allow vendor marking
    version=${version#v}    # some people like to prefix versions with a v

    # ^{} dereferences an annotated tag to the commit it points to
    local tag_sha1=$(git_ rev-parse "$tag^{}")
    [[ $tag_sha1 ]] || fatal "Failed to rev-parse tag '$tag'"

    # Non-empty when %prep was already executed for this very tag
    local prepared=$([[ -r $PREPARE_STAMP && $(<"$PREPARE_STAMP") == "$tag_sha1" ]] && echo 1)

    # Git stash is known to fail on git index which was updated on host previously
    local stash=
    if ! stash=$(git_ update-index --refresh >/dev/null; git_ stash create); then
        fatal "Cannot save state of Git working tree: git-stash failed."
    fi

    # A stash commit captures uncommitted changes; fall back to HEAD when clean
    local head=${stash:-$(git_ rev-parse HEAD)}

    # Do not consider commits that were created by successful %prep execution as changes
    if [[ $prepared ]]; then
        head=$(skip_patches "$head" "$tag_sha1")
    fi

    # One "<name> <short-sha1> <commit-time>" line per modified submodule
    local modified_submodules=
    modified_submodules=$(
        describe_if_modified()
        {
            git_() { git "$@"; } # restore default behavior - we are in a submodule

            local stash=
            stash=$(git update-index --refresh >/dev/null; git stash create) || return

            local head=${stash:-$(git rev-parse HEAD)}

            if [[ $prepared ]]; then
                head=$(skip_patches "$head" "$tag_sha1")
            fi

            # $name and $sha1 are provided by 'git submodule foreach'
            if [[ $head != $sha1 ]]; then
                local short=$(git rev-list --max-count=1 --abbrev-commit "$head")
                local time=$(git log --max-count=1 --pretty=%ct "$head")
                printf '%s %s %s\n' "$name" "$short" "$time"
            fi
        }
        # Serialize the needed variables and helpers into the foreach shell;
        # declare -p/-f re-emit them as evaluable definitions
        git_ submodule --quiet foreach --recursive "
            set -o nounset
            $(declare -p prepared tag_sha1 PATCHSET_ID_REF)
            $(declare -f describe_if_modified skip_patches)
            describe_if_modified
        "
    )
    if [[ $? -ne 0 ]]; then
        fatal "Cannot save state of Git submodules: git-stash failed for some."
    fi

    if [[ $head != "$tag_sha1" || $modified_submodules ]]; then
        local branch=$(git_ rev-parse --abbrev-ref HEAD)

        # Use the newest commit time across the superproject and submodules
        local time=$(git_ log --max-count=1 --pretty=%ct "$head")
        local submodule_info=
        if [[ $modified_submodules ]]; then
            local subname= subsha1= subtime=
            while read subname subsha1 subtime; do
                submodule_info+=+$subname.$subsha1
                if (( subtime > time )); then
                    time=$subtime
                fi
            done <<<"$modified_submodules"
        fi

        local timestamp=$(TZ=UTC date --date=@"$time" +%Y%m%d%H%M%S)
        local sha1sum=$(git_ rev-list --max-count=1 --abbrev-commit "$head")
        version=$version+$branch.$timestamp.$sha1sum$submodule_info
    fi

    version=${version//[^+_[:alnum:]]/.} # replace characters not allowed in the version field

    echo "Setting version: ${version}"
    sed -i "s,^Version:.*,Version: ${version}," "$spec_file"
}

maybe_set_task_name_from_git_branch_name() {
    # Resolve OPT_TASK of the form 'git:<regex>' by matching <regex> against
    # the current Git branch name. Clear OPT_TASK when there is no match or
    # we are not inside a Git work tree.
    [[ $OPT_TASK == git:* ]] || return 0

    if ! inside_git_worktree; then
        OPT_TASK=
        return
    fi

    local branch_regex=${OPT_TASK#git:}
    local current_branch=$(git_ rev-parse --abbrev-ref HEAD)

    if [[ $current_branch =~ $branch_regex ]]; then
        OPT_TASK=${BASH_REMATCH[0]}
        notice "Task name determined as '$OPT_TASK' (from the current Git branch)"
    else
        OPT_TASK=
    fi
}

# Helper to read XML
read_dom () {
    # Split the input on '>' so a single read consumes "<tag attrs" into
    # ENTITY and the text that follows the tag into CONTENT (both globals).
    local IFS=\>
    # NOTE(review): 'read' without -r mangles backslashes in the input -
    # presumably acceptable for the trusted config files parsed here; confirm
    # before reusing this helper elsewhere.
    read -d \< ENTITY CONTENT
    local RET=$?
    TAG_NAME=${ENTITY%% *}
    # NB: when the entity carries no attributes, ATTRIBUTES is left holding
    # the whole entity - callers only eval it for known tags.
    ATTRIBUTES=${ENTITY#* }
    ATTRIBUTES=${ATTRIBUTES%/}
    return $RET
}

# This slurps the XML and converts tags like <subnet> to $device_subnet
# Also sets device_name and device_type from the attributes
get_device() {
    local FOUND_DEVICE=
    local IN_DEVICE=
    local maintag=
    while read_dom; do
        case $TAG_NAME in
            device )
                maintag=$TAG_NAME
                # NOTE(review): ATTRIBUTES comes straight from the XML and is
                # eval'ed - safe only for trusted configuration input
                eval local $ATTRIBUTES
                if [[ "$name" == "$1" ]] ; then
                    FOUND_DEVICE=1
                    IN_DEVICE=1
                    device_name="$name"
                    device_type="$type"
                else
                    IN_DEVICE=
                fi
                ;;
            engine )
                maintag=$TAG_NAME
                eval local $ATTRIBUTES
                ;;
            mac|index|subnet|ip|sshkeypath|sshport|username )
                # Don't process and store nested tags if we're in
                # device with wrong name
                if [[ "$maintag" == "device" ]] && [[ $IN_DEVICE != 1 ]]; then
                    continue
                fi
                # Store e.g. <subnet> under <device> as $device_subnet
                eval ${maintag}_$TAG_NAME=\'$CONTENT\'
                ;;
        esac
    done
    # Set this up as it's useful
    if [[ "$device_subnet" ]]; then
        device_ip="$device_subnet".$device_index
    fi
    # Default to the standard SSH port when not configured
    if [[ ! "$device_sshport" ]]; then
        device_sshport=22
    fi
    # Resolve the key path: keys under $HOME map to the real home directory
    # (/host_home under the build engine); other paths are taken relative to
    # the shared SDK directory
    if [[ "$device_sshkeypath" == '$HOME/'* ]]; then
        local real_home=$(inside_build_engine && echo "/host_home" || echo "$HOME")
        device_sshkeypath=$real_home/${device_sshkeypath#\$HOME/}
    elif [[ "$device_sshkeypath" ]]; then
        device_sshkeypath=$(get_shared_dir)/${device_sshkeypath}
    fi
    if [[ "$FOUND_DEVICE" == 1 ]]; then return 0; else return 1; fi
}

ensure_device_prepared() {
    # Under the build engine, ask sfdk over D-Bus to prepare the device - the
    # 'prepare-device' hook is then run by sfdk while serving that call.
    # Otherwise run the hook directly ourselves.
    if inside_build_engine; then
        gdbus call --timeout=30000 \
            --address="$(</run/sdk-setup/sfdk_bus_address)" \
            --dest="$SAILFISH_SDK_SFDK_DBUS_SERVICE" --object-path /device \
            --method org.sailfishos.sfdk.Device.prepare \
            >/dev/null
    else
        local hook_exports=()
        run_hook prepare-device "${hook_exports[*]}" "$OPT_DEVICE" \
            || fatal "The 'prepare-device' hook failed"
    fi
}

# SSH requires that a private key file is only accessible to the user. Under
# the build engine, file permissions under /host_home are too open to comply
# with that. Loading the key from stdin is a way to work around this.
# Execute <command...> with SSH authentication set up for <user>: use the
# already running agent when it has a usable key, otherwise spawn a temporary
# ssh-agent and load the device-specific (or default) keys via stdin.
with_specific_or_default_keys() {
    local user=$1 command=("${@:2}")

    # device_sshkeypath may point at a key file directly or at a directory
    # holding per-user key files
    local key=$device_sshkeypath
    if [[ $key ]]; then
        [[ -f $key ]] || key=$key/$user
        [[ -f $key ]] || fatal "SSH key not configured for user '$user' on '$device_name'"
    fi

    local use_default_auth=
    # Is agent reachable and does it have any key?
    # (Do not check for exit code 2 - it is not reliable)
    if ssh-add -l &>/dev/null; then
        if [[ $key ]]; then
            # Does the agent have the required key?
            local fingerprint=$(ssh-keygen -l -f "$key" 2>/dev/null)
            if [[ $fingerprint ]] && ssh-add -l |grep -q -F "$fingerprint"; then
                use_default_auth=1
            fi
        else
            # No specific key required
            use_default_auth=1
        fi
    fi

    if [[ $use_default_auth ]]; then
        "${command[@]}"
        return
    fi

    # Helper executed inside the temporary agent's shell (see below)
    add_specific_or_default_keys() {

        # Use a non-interactive askpass helper so loading an encrypted key
        # does not block waiting for a terminal
        ssh-add() {
            DISPLAY=:0 SSH_ASKPASS=/usr/libexec/sailfish-sdk-setup/ssh-askpass command ssh-add "$@"
        }

        local key=$1
        if [[ $key ]]; then
            # Load via stdin to sidestep too-open file permissions
            ssh-add -q - <"$key" || return
        elif inside_build_engine; then
            local real_home="/host_home"
            local default_key=
            # See in Qt Creator sources for defaultKeys() in
            # genericlinuxdeviceconfigurationwizardpages.cpp
            for default_key in "$real_home"/.ssh/id_{rsa,ecdsa,ed25519}; do
                if [[ -f $default_key ]]; then
                    ssh-add -q - <"$default_key" || :
                fi
            done
        fi
    }

    # Serialize the helper and its dependency into the agent's child shell;
    # 'declare -f' re-emits them as evaluable function definitions
    ssh-agent bash -c "$(declare -f inside_build_engine add_specific_or_default_keys);
        add_specific_or_default_keys $(quote "$key") && $(quote "${command[@]}")" bash
}

rsync_as() {
    # Run rsync against the device, authenticated as <user>
    local user=$1
    shift
    RSYNC_RSH="ssh -F /etc/ssh/ssh_config.sdk -l $user -p $device_sshport" \
        with_specific_or_default_keys "$user" \
        rsync "$@"
}

# Run a (quoted) command on the device over SSH as <user>. An optional
# leading '-t' requests a pseudo-terminal.
ssh_as() {
    # Use an array so that no option word-splits unexpectedly
    local maybe_t_opt=()
    if [[ $1 == -t ]]; then
        maybe_t_opt=(-t)
        shift
    fi
    local user="$1";shift
    # Quote expansions that used to be subject to word splitting
    with_specific_or_default_keys "$user" \
        ssh "${maybe_t_opt[@]}" -F /etc/ssh/ssh_config.sdk -l "$user" "$device_ip" -p "$device_sshport" "$(quote "$@")"
}

get_spec_setup_subdir() {
    # Extract the subdirectory part of the '-n' argument to the %setup or
    # %autosetup macro in <spec>, expanding a possible rpm %{tag} in it.
    local spec=$1

    # Put the file operand after the script - works with any sed, not just
    # with GNU option permutation
    local setup_dir=$(sed -n -e '
        /^%\(auto\)\?setup\s/ {
            # only care about the argument to "-n"
            s/^.*[[:space:]]\+-n[[:space:]]\+\([^[:space:]]\+\).*$/\1/
            # only care about subdirectories
            s/^[^/]\+\///p
            # the rest of the file can be skipped
            q
        }
    ' "$spec")

    # in case we got a spec %{tag} out of this, try to expand it
    get_spec_tag "$spec" "$setup_dir"
}

# Change into the %setup subdirectory of <spec> if it defines one. Under
# shadow build the directory is created first as it may not exist yet.
maybe_cd_to_spec_setup_subdir() {
    local spec=$1
    local setup_dir=$(get_spec_setup_subdir "$spec")
    if [[ $setup_dir ]]; then
        is_shadow_build && mkdir -p "$setup_dir"
        # Do not continue silently in the wrong directory when cd fails
        cd "$setup_dir" || fatal "Failed to enter directory '$setup_dir'"
    fi
}

# handle release tags with format "x" or "x.y", where x and y are
# numbers, leave others untouched
#  get tag
#  check if it has x.y
#  if it is x.y get y and check if y is number
#  if y is number => increment
#  if it only is x check if x is number
#  if x is number => increment
#  else leave untouched
maybe_increment_build_number() {
    # Increment the last numeric component of the Release tag in <spec>.
    # Only "x" and "x.y" forms with a numeric last component are touched.
    [[ $OPT_INC_BUILD_NUMBER -eq 0 ]] && return

    local spec=$1

    local release=$(get_spec_tag "$spec" "%{release}")
    # Split on dots without word splitting or glob expansion of $release
    local x=$(cut -f 1 -d . <<<"$release")
    local y=$(cut -s -f 2 -d . <<<"$release")
    local z=$(cut -s -f 3- -d . <<<"$release")

    # more than one dot found => don't touch
    [[ -n "$z" ]] && return

    # Keep the counter local - 'let' would leak it into the global scope
    local new_release=

    if [[ -n "$y" ]]; then
        if [[ $y =~ ^-?[0-9]+$ ]]; then
            new_release=$((y + 1))
            echo "Incrementing release: $x.$y -> $x.$new_release"
            sed -i "s/^Release:.*/Release: $x.$new_release/" "$spec"
        fi
        # non-numeric after dot => don't touch
        return
    fi

    if [[ -n "$x" ]]; then
        if [[ $x =~ ^-?[0-9]+$ ]]; then
            new_release=$((x + 1))
            echo "Incrementing release: $x -> $new_release"
            sed -i "s/^Release:.*/Release: $new_release/" "$spec"
            return
        fi
    fi
}

quote() {
    # Print all arguments shell-quoted, separated (not terminated) by single
    # spaces. Fails without output when no argument is given.
    [[ $# -gt 0 ]] || return 1
    local joined=
    printf -v joined '%q ' "$@"
    printf '%s' "${joined% }"
}

rpm_quote() {
    # Like quote(), but with each backslash doubled so the result survives
    # rpm macro processing
    local quoted=$(quote "$@")
    printf '%s' "${quoted//\\/\\\\}"
}

# Sometimes it's better to be paranoid than sorry.
clean_dir_no_recurse() {
    # Delete the direct non-directory entries of <dir>, then remove <dir>
    # itself (and any now-empty parents) when possible.
    local target_dir=$1
    # Redirection also keeps 'rmdir --parents' from being unnecessarily verbose
    find "$target_dir" -maxdepth 1 ! -type d -delete \
        && rmdir --parents "$target_dir" &>/dev/null
}

init_wrappers_dir() {
    # Create the wrapper directories and install user-defined wrappers from
    # OPT_WRAP entries of the form "<command>:<wrapper-path>".
    local dir=
    for dir in "$WRAPPERSDIR" "$USERWRAPPERSDIR"; do
        # Keep the existence guard - mkdir -p alone would fail on a
        # same-named non-directory
        [[ -e $dir ]] || mkdir -p "$dir"
    done
    local entry=
    for entry in "${OPT_WRAP[@]}"; do
        ln -s "${entry#*:}" "$USERWRAPPERSDIR/${entry%%:*}"
    done
}

remove_wrappers_dir() {
    # Tear down the wrapper directories created by init_wrappers_dir
    local dir=
    for dir in "$WRAPPERSDIR" "$USERWRAPPERSDIR"; do
        [[ -d $dir ]] && clean_dir_no_recurse "$dir"
    done
}

# 1. Allows to pass extra arguments to qmake
# 2. Enables shadow build by adding path prefix to the project file, or
# 2.1 passing project directory path if the project file was not passed
# 3. Populates variable cache used by Qt Creator to augment its project model
init_qmake_wrapper() (
    local wrapper=$1
    local extra_args=("${@:2}")

    # Body of the generated wrapper. It is serialized below with
    # 'declare -f' - bash strips comments when doing so, so comments here do
    # not end up in the generated file.
    main() {
        local self=${0#$PWD/}
        local command=${0##*/}

        # Drop the wrappers directory from PATH so 'which' finds the real tool
        local path=${0%/*}
        PATH=${PATH/$path:/}

        local real=$(which "$command")
        if [[ ! $real || $real == "$0" ]]; then
            echo "$self: Cannot find real $command" >&2
            exit 1
        fi

        # Under shadow build redirect the project file (or directory)
        # argument to the corresponding location in the source tree
        if [[ $IS_SHADOW_BUILD ]]; then
            local subdir=${PWD#$BUILD_DIR}
            local src_dir=$SRC_DIR${subdir:+/$subdir}
            local args=()
            local pro_passed=
            for arg in "$@"; do
                if [[ $arg == *.pro && -f $src_dir/$arg ]]; then
                    args+=("$src_dir/$arg")
                    pro_passed=1
                else
                    args+=("$arg")
                fi
            done
            if [[ ! $pro_passed ]]; then
                args+=("$src_dir")
            fi
            set -- "${args[@]}"
        fi

        # Record qmake variable assignments for Qt Creator's project model
        local assignments=()
        for arg in "$@"; do
            if [[ $arg =~ ^[a-zA-Z_][a-zA-Z0-9_]*[+*-]?= ]]; then
                assignments+=("$arg")
            fi
        done
        printf '%s\0' "${assignments[@]}" > "$VARIABLES_CACHE"

        echo "$self: Executing real $command:" >&2

        set -x
        "$real" "$@" "${EXTRA_ARGS[@]}"
    }

    init_wrappers_dir
    cat <<END >"$wrapper"
#!/bin/bash

set -o nounset

IS_SHADOW_BUILD=$(is_shadow_build && echo 1)
BUILD_DIR=$(quote "$PWD")
SRC_DIR=$(quote "${OPT_SRC_DIR:-$PWD}")
VARIABLES_CACHE=$STATEDIR/qmake_variables.$OPT_TARGET.cache
EXTRA_ARGS=($(quote "${extra_args[@]}"))

$(declare -f main)

main "\$@"
END
    chmod +x "$wrapper"
)

# 1. Allows to pass extra arguments to cmake
# 2. Enables shadow build by adding path prefix to the project-dir argument, or
# 2.1 passing project directory path if it was not passed
# 3. Allows to suppress cmake not invoked in the desired mode
init_cmake_wrapper() (
    local wrapper=$1

    # Configure-mode path of the generated wrapper. Serialized below with
    # 'declare -f' - comments here are stripped by bash and do not appear in
    # the generated file.
    do_configure() {
        local src_dir=
        if [[ $IS_SHADOW_BUILD ]]; then
            local subdir=${PWD#$BUILD_DIR}
            src_dir=$SRC_DIR${subdir:+/$subdir}
        else
            src_dir=.
        fi

        # Replace a project-dir argument with its source-tree counterpart
        local args=()
        local maybe_src_arg=()
        for arg in "$@"; do
            # Allow .spec files do shadow build explicitly
            local prefixed_clean=$(readlink --canonicalize-missing "$src_dir/$arg")
            if [[ -f $prefixed_clean/CMakeLists.txt ]]; then
                maybe_src_arg=("$prefixed_clean")
            else
                args+=("$arg")
            fi
        done
        set -- "${args[@]}"

        if [[ $IS_SHADOW_BUILD && ${#maybe_src_arg[*]} -eq 0 ]]; then
            maybe_src_arg=("$src_dir")
        fi

        # NBSP used to prevent Qt Creator treating this as an error when issued for 'cmake'
        echo "$self:${NBSP}Executing real $command:" >&2

        (
            set -x
            "$real" "$@" "${EXTRA_CONFIGURE_ARGS[@]}" ${maybe_src_arg[@]:+"${maybe_src_arg[@]}"}
        )
    }

    # Build-mode ('cmake --build') path of the generated wrapper
    do_build() {
        local args=() build_tool_args=()
        while [[ $# -gt 0 ]]; do
            case $1 in
                --build)
                    if [[ $2 != . ]]; then
                        echo "$self: The argument to '--build' passed in RPM SPEC file must be '.' (literally)" >&2
                        exit 1
                    fi
                    args+=("$1" "$2")
                    shift
                    ;;
                --target)
                    if [[ $MAKE_TARGET && $2 != "$MAKE_TARGET" ]]; then
                        echo "$self: The argument to '--target' passed in RPM SPEC file differs from that on command line" >&2
                        exit 1
                    fi
                    args+=("$1" "$2")
                    shift
                    ;;
                --)
                    # Everything after '--' goes to the underlying build tool
                    shift
                    build_tool_args=("$@")
                    shift $#
                    break
                    ;;
                *)
                    args+=("$1")
                    ;;
            esac
            shift
        done

        # NBSP used to prevent Qt Creator treating this as an error when issued for 'cmake'
        echo "$self:${NBSP}Executing real $command:" >&2

        (
            set -x
            "$real" "${args[@]}" "${EXTRA_BUILD_ARGS[@]}" -- "${build_tool_args[@]}" "${EXTRA_BUILD_TOOL_ARGS[@]}"
        )
    }

    # Entry point of the generated wrapper: find the real cmake and dispatch
    # to do_configure or do_build, honoring ALLOWED_MODE suppression
    main() {
        local self=${0#$PWD/}
        local command=${0##*/}
        local NBSP=$'\xC2\xA0'

        # Drop the wrappers directory from PATH so 'which' finds the real tool
        local path=${0%/*}
        PATH=${PATH/$path:/}

        local real=$(which "$command")
        if [[ ! $real || $real == "$0" ]]; then
            echo "$self: Cannot find real $command" >&2
            exit 1
        fi

        if [[ ${1:-} != --build ]]; then
            if [[ ! $ALLOWED_MODE || $ALLOWED_MODE == configure ]]; then
                do_configure "$@"
            else
                # NBSP used to prevent Qt Creator treating this as an error when issued for 'cmake'
                echo "$self:${NBSP}Ignoring $command invoked to create a project buildsystem" >&2
            fi
        else
            if [[ ! $ALLOWED_MODE || $ALLOWED_MODE == build ]]; then
                do_build "$@"
            else
                # NBSP used to prevent Qt Creator treating this as an error when issued for 'cmake'
                echo "$self:${NBSP}Ignoring $command invoked to build the project" >&2
            fi
        fi
    }

    init_wrappers_dir
    cat <<END >"$wrapper"
#!/bin/bash

set -o nounset

IS_SHADOW_BUILD=$(is_shadow_build && echo 1)
BUILD_DIR=$(quote "$PWD")
SRC_DIR=$(quote "${OPT_SRC_DIR:-$PWD}")
ALLOWED_MODE=$(quote "$CMAKE_MODE")
EXTRA_CONFIGURE_ARGS=($(quote "${CMAKE_EXTRA_CONFIGURE_ARGS[@]}"))
EXTRA_BUILD_ARGS=($(quote "${CMAKE_EXTRA_BUILD_ARGS[@]}"))
EXTRA_BUILD_TOOL_ARGS=($(quote "${CMAKE_EXTRA_BUILD_TOOL_ARGS[@]}"))
BUILD_TARGET=$(quote "$CMAKE_BUILD_TARGET")

$(declare -f do_configure)
$(declare -f do_build)
$(declare -f main)

main "\$@"
END
    chmod +x "$wrapper"
)

init_autotools_wrappers() {
    # Install wrappers so that autotools invoked from the build directory
    # operate on the sources under <src_subdir>.
    local src_subdir=${1?}
    local src_dir=$OPT_SRC_DIR/$src_subdir

    local probe=
    for probe in configure.ac configure.in configure; do
        if [[ -e $src_dir/$probe ]]; then
            init_source_tool_wrapper "$src_subdir/configure"
            break
        fi
    done

    if [[ -e $src_dir/autogen.sh ]]; then
        init_source_tool_wrapper --cd "$src_subdir/autogen.sh"
    fi

    local tool=
    for tool in "${AUTOTOOLS[@]}"; do
        init_cd_to_sources_wrapper "$WRAPPERSDIR/$tool"
    done
}

# Create a wrapper for a tool from source tree under the (shadow) build tree.
init_source_tool_wrapper() {
    # Generate under the (shadow) build tree an executable wrapper that runs
    # the identically named tool from the source tree. With --cd the wrapper
    # first changes into the tool's source directory.
    local opt_cd=
    if [[ $1 == --cd ]]; then
        opt_cd=1
        shift
    fi
    local file=$1

    local subdir=${file%/*}

    mkdir -p "$subdir" || return

    {
        printf '%s\n' '#!/bin/bash'
        printf '%s\n' "# This file was auto-generated by 'mb2'."
        if [[ $opt_cd ]]; then
            printf '%s\n' "cd \"$OPT_SRC_DIR/$subdir\" || exit"
        fi
        printf '%s\n' "exec \"$OPT_SRC_DIR/$file\" \"\$@\""
    } >"$file"
    chmod +x "$file"
}

# Generate a wrapper that executes the real tool from within the source
# directory corresponding to the current build directory.
init_cd_to_sources_wrapper() (
    local wrapper=$1

    # Body of the generated wrapper; serialized below with 'declare -f',
    # which strips comments, so these do not appear in the generated file.
    main() {
        local self=${0#$PWD/}
        local command=${0##*/}

        # Drop the wrappers directory from PATH so 'which' finds the real tool
        local path=${0%/*}
        PATH=${PATH/$path:/}

        local real=$(which "$command")
        if [[ ! $real || $real == "$0" ]]; then
            echo "$self: Cannot find real $command" >&2
            exit 1
        fi

        if [[ $PWD/ != "$BUILD_DIR/"* ]]; then
            # We are not under the build dir. This means we are invoked by
            # another wrapped command that already did cd to sources. Execute
            # the real command silently in this case.
            "$real" "$@"
            return
        fi

        # Map the current build subdirectory onto the source tree
        local subdir=${PWD#$BUILD_DIR}
        local src_dir=$SRC_DIR${subdir:+/$subdir}

        echo "$self: Executing real $command:" >&2

        set -x
        cd "$src_dir" && "$real" "$@"
    }

    init_wrappers_dir
    cat <<END >"$wrapper"
#!/bin/bash

set -o nounset

BUILD_DIR=$(quote "$PWD")
SRC_DIR=$(quote "${OPT_SRC_DIR:-$PWD}")

$(declare -f main)

main "\$@"
END
    chmod +x "$wrapper"
)

# Generate a 'make' wrapper that produces a compilation database: via a
# makefile-provided target, via the 'compiledb' tool, or by capturing a
# dry-run of make into COMPILEDB_INPUT for later processing.
init_make_compiledb_wrapper() (
    local wrapper=$1
    local extra_args=("${@:2}")

    # Body of the generated wrapper; serialized below with 'declare -f',
    # which strips comments, so these do not appear in the generated file.
    main() {
        local self=${0#$PWD/}
        local command=${0##*/}

        # Drop the wrappers directory from PATH so 'which' finds the real tool
        local path=${0%/*}
        PATH=${PATH/$path:/}

        local real=$(which "$command")
        if [[ ! $real || $real == "$0" ]]; then
            echo "$self: Cannot find real $command" >&2
            exit 1
        fi

        # Check if the makefile defines any of the well known targets to
        # generate compilation database a custom way.
        local make_db=$("$real" "$@" --dry-run --silent \
            --print-data-base --no-builtin-rules --no-builtin-variables)
        local target= candidate_target=
        for candidate_target in "${CANDIDATE_TARGETS[@]}"; do
            if grep -q -e "^${candidate_target//./\\.}:" <<<"$make_db"; then
                target=$candidate_target
                break
            fi
        done

        rm -f "$COMPILEDB_INPUT"

        if [[ $target ]]; then
            # Keep only options and variable assignments, drop targets
            local filtered_args=() arg=
            for arg in "$@"; do
                [[ $arg == -* || $arg == *=* ]] && filtered_args+=("$arg")
            done

            echo "$self: Executing real $command:" >&2

            set -x
            "$real" "${filtered_args[@]}" "$target" "${EXTRA_ARGS[@]}"
        elif which compiledb >/dev/null; then
            echo "$self: Wrapping real $command with compiledb:" >&2

            set -x
            compiledb --no-build make "$@" "${EXTRA_ARGS[@]}"
        else
            echo "$self: Executing real $command in dry-run mode:" >&2

            (
                set -x
                "$real" "$@" --dry-run --always-make --print-directory --keep-going \
                    "${EXTRA_ARGS[@]}"
            ) >"$COMPILEDB_INPUT"
        fi
    }

    init_wrappers_dir
    cat <<END >"$wrapper"
#!/bin/bash

set -o nounset

CANDIDATE_TARGETS=(compiledb compile_commands.json)
COMPILEDB_INPUT=$(quote "$COMPILEDB_INPUT")
EXTRA_ARGS=($(quote "${extra_args[@]}"))

$(declare -f main)

main "\$@"
END
    chmod +x "$wrapper"
)

# Generate a wrapper script for the 'patch' command at $wrapper.  Besides
# applying a patch with the real 'patch', the wrapper records the applied
# changes as a git commit - recovering author/subject/date from the patch
# itself where possible - and marks the commit with $patchset_id via git
# notes, so the patches can later be regenerated with the 'scrape' command.
# Implemented as a subshell function so the helper definitions below do not
# leak into the caller's namespace.
init_patch_wrapper() (
    local wrapper=$1      # path of the wrapper script to generate
    local patchset_id=$2  # patch-set identifier recorded in git notes
    # NOTE(review): 'local x=$(cmd)' masks the exit status of 'which' here
    local host_git=$(which git)

    # Use git from host
    # ($HOST_GIT is defined by the generated wrapper script below)
    git() { SBOX_DISABLE_MAPPING=1 "$HOST_GIT" "$@"; }

    # We cannot simply use git-am as it does not accept all the patch
    # options we may get passed. So we need to parse the possible info
    # from the patch, apply using plain patch and then commit.
    #
    # parse_info: read a patch from stdin and extract authorship info with
    # 'git mailinfo' into the nameref out-parameters $1..$5.  Returns
    # non-zero if the info cannot be determined.  (Comments inside these
    # functions are stripped by 'declare -f' when they are serialized into
    # the wrapper script.)
    parse_info() {
        local -n out_author=$1
        local -n out_author_email=$2
        local -n out_subject=$3
        local -n out_date=$4
        local -n out_message=$5

        local tmpdir=
        # Cleanup runs from the RETURN trap; defined with a subshell body
        # so the INT/TERM/HUP trap set inside stays local to the cleanup.
        parse_info_cleanup() (
            trap 'echo cleaning up...' INT TERM HUP
            if [[ $tmpdir ]]; then
                rm -rf "$tmpdir"
            fi
        )
        trap 'parse_info_cleanup; trap - RETURN' RETURN
        trap 'return 1' INT TERM HUP

        # git runs from host with mapping disabled, so a path needs to be used
        # which does not require mapping.
        tmpdir=$(mktemp -d --tmpdir="$STATEDIR") || return
        local message_file=$tmpdir/message

        local info=
        # Reads patch from stdin
        info=$(git mailinfo "$message_file" /dev/null) || return

        # git-mailinfo prints 'Author:', 'Email:', 'Subject:' and 'Date:'
        # headers on stdout and writes the commit message to $message_file
        out_author=$(sed -n 's/^Author: //p' <<<$info)
        out_author_email=$(sed -n 's/^Email: //p' <<<$info)
        out_subject=$(sed -n 's/^Subject: //p' <<<$info)
        out_date=$(sed -n 's/^Date: //p' <<<$info)
        out_message=$(<"$message_file")
    }

    # main: body of the generated wrapper.  Applies the patch read on
    # stdin with the real 'patch' command, then commits the result.
    main() {
        local self=${0#$PWD/}
        local command=${0##*/}

        # Remove the wrapper's own directory from PATH so that 'which'
        # finds the real command, not this wrapper again
        local path=${0%/*}
        PATH=${PATH/$path:/}

        local real=$(which "$command")
        if [[ ! $real || $real == "$0" ]]; then
            echo "$self: Cannot find real $command" >&2
            return 1
        fi

        # Without git available in the build environment the changes
        # cannot be committed - fall back to plain patching
        if ! git --help &>/dev/null; then
            echo "$self: The 'git' command is not available under the build environment. " \
                "Resorting to plain 'patch'." >&2

            "$real" "$@"
            return
        fi

        if [[ $(git status --porcelain --untracked-files=no) ]]; then
            echo "$self: Refusing to patch dirty work tree - commit or reset your changes first." >&2
            return 1
        fi

        # Slurp the whole patch from stdin - it is needed multiple times
        # (parsing, applying and hashing)
        local patch=
        IFS='' read -r -d '' patch

        local author=
        local author_email=
        local subject=
        local date=
        local message=
        if ! parse_info author author_email subject date message <<<$patch; then
            echo "$self: Failed to parse patch info - resorting to plain 'patch'." >&2
        fi

        : ${subject:="(no subject)"}
        local full_message=$PREPARE_LEADER$subject$'\n\n'$message

        # Record untracked files before and after patching so that files
        # added by the patch can be git-added for the commit
        local untracked_old=$(git status --porcelain |sed -n 's/^?? //p' |sort -u)

        "$real" "$@" <<<$patch || return

        local untracked_now=$(git status --porcelain |sed -n 's/^?? //p' |sort -u)
        local untracked_added=()
        readarray -t untracked_added < <(join -v 2 \
            <(printf '%s' "$untracked_old") <(printf '%s' "$untracked_now"))
        if [[ ${#untracked_added[*]} -gt 0 ]]; then
            if ! git add "${untracked_added[@]}"; then
                echo "$self: Failed to git-add some of the newly added files" >&2
            fi
        fi

        # No submodule handling here - patches spanning multiple submodules are
        # clearly a bad practice and as such completely unsupported!
        local changes_in_submodules=$(git submodule foreach --quiet \
            git status --untracked-files=no --porcelain)
        if [[ $changes_in_submodules ]]; then
            echo "$self: Unsupported: Patch spans across git submodules" >&2
            return 1
        fi

        local extra_args=()

        # If the patch matches a file shipped under rpm/ (see PATCHES
        # below), record the file name in a trailer so 'scrape' can
        # restore the original name
        local patch_id=$(printf '%s' "$patch" |git hash-object --stdin)
        local patch_file=${PATCHES[$patch_id]:-}
        if [[ $patch_file ]]; then
            extra_args+=(--trailer "$SCRAPE_TRAILER=$SCRAPE_FILENAME=$patch_file")
        fi

        # Fall back to the fake identity when the patch carries no authorship
        export GIT_AUTHOR_NAME=${author:-$FAKE_GIT_AUTHOR}
        export GIT_AUTHOR_EMAIL=${author_email:-$FAKE_GIT_EMAIL}
        export GIT_AUTHOR_DATE=$date
        export GIT_COMMITTER_NAME=$FAKE_GIT_AUTHOR
        export GIT_COMMITTER_EMAIL=$FAKE_GIT_EMAIL

        if ! git_commit_brief --all --allow-empty --message "$full_message" \
                "${extra_args[@]}"; then
            echo "$self: Failed to git-commit applied changes" >&2
            return 1
        fi

        git notes --ref "$PATCHSET_ID_REF" add --message "$PATCHSET_ID"
    }

    # Map blob SHA1 -> file name for the *.patch files shipped under rpm/
    # in HEAD, used by main above to match an applied patch to its file
    local -A PATCHES=()
    local mode= type= id= name=
    while read -r -d '' mode type id name; do
        [[ $name = *.patch ]] && PATCHES[$id]=$name
    done < <(command git ls-tree -z --full-tree 'HEAD:rpm')

    # Emit the wrapper: quoted configuration values followed by the helper
    # functions serialized with 'declare -f'
    init_wrappers_dir
    cat <<END >"$wrapper"
#!/bin/bash

set -o nounset

STATEDIR=$(quote "$STATEDIR")
FAKE_GIT_AUTHOR=$(quote "$FAKE_GIT_AUTHOR")
FAKE_GIT_EMAIL=$(quote "$FAKE_GIT_EMAIL")
PREPARE_LEADER=$(quote "$PREPARE_LEADER")
PATCHSET_ID_REF=$(quote "$PATCHSET_ID_REF")
PATCHSET_ID=$(quote "$patchset_id")
SCRAPE_TRAILER=$(quote "$SCRAPE_TRAILER")
SCRAPE_FILENAME=$(quote "$SCRAPE_FILENAME")
HOST_GIT=$(quote "$host_git")
WE=$(quote "$WE")

$(declare -p PATCHES)

$(declare -f git)
$(declare -f git_commit_brief)
$(declare -f parse_info)
$(declare -f main)

main "\$@"
END
    chmod +x "$wrapper"
)

# Generate a transparent pass-through wrapper script at $1.  The wrapper
# locates the real command of the same name (with the wrapper's own
# directory removed from PATH), announces it and executes it with the
# caller's arguments followed by the extra arguments given here as $2...
init_simple_wrapper() {
    local target_script=$1
    shift
    local forwarded_args=("$@")

    init_wrappers_dir
    # Static part first (quoted heredoc - no expansion) ...
    cat <<'END' >"$target_script"
#!/bin/bash
self=${0#$PWD/}
command=${0##*/}
NBSP=$'\xC2\xA0'

path=${0%/*}
PATH=${PATH/$path:/}

real=$(which "$command")
if [[ ! $real || $real == "$0" ]]; then
    echo "$self: Cannot find real $command" >&2
    exit 1
fi

# NBSP used to prevent Qt Creator treating this as an error when issued for 'make'
echo "$self:${NBSP}Executing real $command:" >&2

set -x

"$real" "$@" \
END
    # ... then the quoted extra arguments, continuing the command line left
    # open by the trailing backslash above
    {
        quote "${forwarded_args[@]}"
        echo
    } >>"$target_script"
    chmod +x "$target_script"
}

# Generate a no-op wrapper script at $1: it only prints a notice to stderr
# and exits successfully without executing anything.
init_noop_wrapper() {
    local target_script=$1

    init_wrappers_dir
    # Quoted heredoc - the wrapper body is emitted verbatim
    {
        cat <<'END'
#!/bin/bash
set -o nounset
self=${0#$PWD/}
command=${0##*/}
NBSP=$'\xC2\xA0'
# NBSP used to prevent Qt Creator treating this as an error when issued for 'qmake'
echo "$self:${NBSP}Ignoring $command" >&2
END
    } >"$target_script"
    chmod +x "$target_script"
}

# Switch OPT_TARGET to a snapshot of OPT_ORIGINAL_TARGET.  With the special
# '%pool[,size]' snapshot name a snapshot is reserved from a pool of
# snapshots (serialized via a lock file); otherwise the snapshot name is
# formed by appending OPT_SNAPSHOT to the original target name.
# Accepts an optional '--reset=<mode>' argument (default: outdated).
switch_to_snapshot() {
    local opt_reset=outdated
    while [[ $# -gt 0 ]]; do
        case $1 in
            --reset=*)
                opt_reset=${1#*=}
                ;;
            *)
                fatal "Internal error: Unexpected argument to switch_to_snapshot: '$1'"
                ;;
        esac
        shift
    done

    # Legacy '%{task}' placeholder support
    if [[ $OPT_SNAPSHOT == *'%{task}'* ]]; then
        notice "Use of '%{task}' to determine snapshot name is deprecated"
        OPT_SNAPSHOT=${OPT_SNAPSHOT//%{task\}/$OPT_TASK}
        # The substitution may leave the name empty when no task is set
        : ${OPT_SNAPSHOT:=%pool}
    fi

    if [[ $OPT_SNAPSHOT =~ ^%pool(,[0-9]+)?$ ]]; then
        # Optional ',N' suffix overrides the default pool size
        local pool_size=${BASH_REMATCH[1]#,}
        : ${pool_size:=$DEF_SNAPSHOT_POOL_SIZE}

        # NOTE(review): the lock fd is never closed here - presumably the
        # lock is meant to be held for the rest of the process lifetime
        local lock=$STATEDIR/snapshot.lock lock_fd=
        exec {lock_fd}> "$lock" || return
        if ! flock --nonblock "$lock_fd"; then
            fatal "Failed to flock '$lock'"
        fi
        OPT_TARGET=$(sdk-manage target reserve --reset-reused="$opt_reset" "$OPT_ORIGINAL_TARGET" \
            "$OPT_ORIGINAL_TARGET.pool.XXX" "$lock" "$pool_size") || return

        notice "Using the '$OPT_TARGET' snapshot of the build target"
    else
        OPT_TARGET=$OPT_ORIGINAL_TARGET.$OPT_SNAPSHOT
        sdk-manage target snapshot --reset="$opt_reset" "$OPT_ORIGINAL_TARGET" "$OPT_TARGET" || return
    fi
}

# Intentionally uses git directly (does not use git_). It is only meant to be
# used from %prep and that is not allowed to run on shadow build side.
#
# Generate a hook script at $hook that commits any changes the %prep section
# made by other means than applying patches.  The commits use a fake
# identity, carry a trailer telling 'scrape' to discard them, and are tagged
# with $patchset_id via git notes.  This is done for the top-level repository
# and each recursively nested submodule.
init_commit_prep_changes_hook() {
    local hook=$1
    local patchset_id=$2
    # NOTE(review): 'local x=$(cmd)' masks the exit status of 'which' here
    local host_git=$(which git)

    # Use git from host
    # ($HOST_GIT is defined by the generated hook script below)
    git() { SBOX_DISABLE_MAPPING=1 "$HOST_GIT" "$@"; }

    # commit_all: commit all changes to tracked files in the current
    # repository, excluding changes to submodule sha1s.  Relies on $self
    # from main via bash's dynamic scoping.  (Comments inside these
    # functions are stripped by 'declare -f' on serialization.)
    commit_all() {
        if [[ $(git status --untracked-files=no --porcelain --ignore-submodules) ]]; then
            # No way to auto add everything except submodule changes, so add
            # everything first, then reset submodules
            git add -u
            local submodule=
            for submodule in $(git submodule foreach --quiet 'printf "%s\n" "$displaypath"'); do
                git reset --quiet -- "$submodule"
            done

            local message="${PREPARE_LEADER}(%prep)"
            message+=$'\n\n'"These changes were introduced by other means than applying patches."

            # Use --allow-empty just to be sure.
            if ! git_commit_brief --allow-empty --message "$message" \
                    --trailer "$SCRAPE_TRAILER=$SCRAPE_DISCARD"; then
                echo "$self: Failed to git-commit changes done by the %prep section" >&2
                return 1
            fi

            git notes --ref "$PATCHSET_ID_REF" add --message "$PATCHSET_ID"
        fi
    }

    # main: entry point of the generated hook script
    main() {
        local self=${0#$PWD/}

        export GIT_AUTHOR_NAME=$FAKE_GIT_AUTHOR
        export GIT_AUTHOR_EMAIL=$FAKE_GIT_EMAIL
        export GIT_COMMITTER_NAME=$FAKE_GIT_AUTHOR
        export GIT_COMMITTER_EMAIL=$FAKE_GIT_EMAIL

        local ec=0

        # Continue over individual failures so every repository gets its
        # chance, but report overall failure if any of them failed
        commit_all || ec=1
        local submodule=
        for submodule in $(git submodule foreach --quiet --recursive 'printf "%s\n" "$displaypath"'); do
            (cd "$submodule" && commit_all) || ec=1
        done

        return $ec
    }

    # Emit the hook: quoted configuration values followed by the helper
    # functions serialized with 'declare -f'
    init_wrappers_dir
    cat <<END >"$hook"
#!/bin/bash

set -o nounset

FAKE_GIT_AUTHOR=$(quote "$FAKE_GIT_AUTHOR")
FAKE_GIT_EMAIL=$(quote "$FAKE_GIT_EMAIL")
PREPARE_LEADER=$(quote "$PREPARE_LEADER")
SCRAPE_TRAILER=$(quote "$SCRAPE_TRAILER")
SCRAPE_DISCARD=$(quote "$SCRAPE_DISCARD")
PATCHSET_ID_REF=$(quote "$PATCHSET_ID_REF")
PATCHSET_ID=$(quote "$patchset_id")
HOST_GIT=$(quote "$host_git")

$(declare -f git)
$(declare -f git_commit_brief)
$(declare -f commit_all)
$(declare -f main)

main
END
    chmod +x "$hook"
}

# Pre-flight check for package signing: when no external signing helper
# ($SDK_RPMSIGN) is available, verify that native signing can work.
sign_rpms__prepare() {
    [[ -x $SDK_RPMSIGN ]] || sign_rpms_native__prepare
}

# Sign the RPM packages produced from the given spec file.  Finds the
# packages, runs the 'pre-sign' hook on them and signs them either with the
# external $SDK_RPMSIGN helper (when present) or natively with rpmsign.
# Note: $found_rpms is expanded unquoted on purpose - it holds a
# whitespace-separated list of file names.
sign_rpms() {
    local spec_file=$1

    local found_rpms=$(find_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$spec_file")
    [[ $found_rpms ]] || fatal "No package found"

    local hook_exports=()
    run_hook pre-sign "${hook_exports[*]}" ${found_rpms} \
        || fatal "The 'pre-sign' hook failed"

    if [[ -x $SDK_RPMSIGN ]]; then
        $SDK_RPMSIGN ${found_rpms}
    else
        sign_rpms_native ${found_rpms}
    fi || fatal "Failed to sign packages"
}

# Verify that native package signing can work: a signing user is configured,
# its GPG key is available (importing the shared key copy first when running
# inside the build engine), and the configured passphrase (if any) actually
# unlocks the key.  Calls fatal on any unmet precondition.
sign_rpms_native__prepare() {
    if [[ ! $OPT_PACKAGE_SIGN_USER ]]; then
        fatal "Cannot sign packages: No signing user specified."
    fi

    if inside_build_engine; then
        # The key is shared with the build engine as an exported key file -
        # (re)import it, replacing any previously imported copy
        local shared_key=${OPT_SHARED_DIR}/gnupg/${OPT_PACKAGE_SIGN_USER}.key
        if [[ ! -f $shared_key ]]; then
            fatal "Cannot sign packages: Internal error: No GPG key shared for the user '$OPT_PACKAGE_SIGN_USER'."
        fi

        # Determine the user ID stored in the shared key file
        local shared_key_id=$(gpg2 --with-colons --list-packets "$shared_key" \
                |sed -n 's/:user ID packet: "\(.*\)"/\1/p')
        [[ $shared_key_id ]] || return

        # Delete all existing (possibly outdated) keys matching that user ID
        local key_fingerprints=$(gpg2 --with-colons --list-secret-keys --fingerprint "$shared_key_id" \
                |awk -F: '$1 == "fpr" { print $10 }')
        local key_fingerprint=
        for key_fingerprint in $key_fingerprints; do
            if ! gpg2 --batch --delete-secret-and-public-key "$key_fingerprint"; then
                fatal "Cannot sign packages: Internal error: Failed to delete old GPG key copy '$key_fingerprint'."
            fi
        done

        if ! gpg2 --quiet --import "${shared_key}"; then
            fatal "Cannot sign packages: Internal error: Failed to import GPG key from file '$shared_key'."
        fi
    fi

    if ! gpg2 --list-secret-keys "$OPT_PACKAGE_SIGN_USER" &>/dev/null; then
        fatal "Cannot sign packages: No GPG key found for the user '$OPT_PACKAGE_SIGN_USER'."
    fi

    # The passphrase may come from a file or directly from an option
    local passphrase=
    if [[ $OPT_PACKAGE_SIGN_PASSPHRASE_FILE ]]; then
        passphrase=$(<"$OPT_PACKAGE_SIGN_PASSPHRASE_FILE")
    else
        passphrase=$OPT_PACKAGE_SIGN_PASSPHRASE
    fi

    # Try a dummy signing operation to verify the passphrase unlocks the
    # key.  The passphrase is passed on fd 3 to keep it off the command line
    if ! gpg2 --batch --status-fd 1 --sign --local-user "$OPT_PACKAGE_SIGN_USER" \
            --output - --passphrase-fd 3 3<<<"$passphrase" </dev/null &>/dev/null; then
        if [[ ! $passphrase ]]; then
            fatal "Cannot sign packages: The selected GPG key is passphrase protected" \
                "and no passphrase was specified."
        else
            fatal "Cannot sign packages: The given passphrase is not a correct passphrase" \
                "for the '$OPT_PACKAGE_SIGN_USER' key."
        fi
    fi
}

# Sign the given RPM files with rpmsign using the configured signing user.
# A configured passphrase is handed over via a private temporary file which
# is removed again when the function returns.
sign_rpms_native() {
    local rpms=("$@")

    local tmpdir=
    # Cleanup runs from the RETURN trap; defined with a subshell body so
    # the INT/TERM/HUP trap set inside stays local to the cleanup
    sign_rpms_native_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        if [[ $tmpdir ]]; then
            rm -rf "$tmpdir"
        fi
    )
    trap 'sign_rpms_native_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    local extra_args=(--define "_gpg_name $OPT_PACKAGE_SIGN_USER")

    # The passphrase may come from a file or directly from an option
    local passphrase=
    if [[ $OPT_PACKAGE_SIGN_PASSPHRASE_FILE ]]; then
        passphrase=$(<"$OPT_PACKAGE_SIGN_PASSPHRASE_FILE")
    else
        passphrase=$OPT_PACKAGE_SIGN_PASSPHRASE
    fi

    if [[ $passphrase ]]; then
        # Private temp file keeps the passphrase off the command line
        tmpdir=$(mktemp -d) || return
        chmod 0700 "$tmpdir" || return
        local passphrase_file=$tmpdir/passphrase
        cat >"$passphrase_file" <<<"$passphrase" || return
        extra_args+=(--define "_gpg_sign_cmd_extra_args --batch --passphrase-file '$passphrase_file'")
    fi

    # </dev/null - presumably to prevent any interactive prompting
    rpmsign --addsign "${rpms[@]}" "${extra_args[@]}" </dev/null
}

# Parse arguments of the 'build-init' command: at most one positional
# argument, taken as the project file or directory.
run_build_init__process_args() {
    case $# in
        0)
            ;;
        1)
            OPT_PRO_FILE_OR_DIR=$1
            ;;
        *)
            OPT_PRO_FILE_OR_DIR=$1
            fatal "Unexpected argument: '$2'"
            ;;
    esac

    handle_pro_file_or_dir_option
}

# The 'build-init' command needs no action of its own - everything is
# handled by the common argument processing (run_build_init__process_args).
run_build_init() {
    true
}

# Parse arguments of the 'build-requires' command.  Sets
# BUILD_REQUIRES_MODE (pull|reset|diff) and BUILD_REQUIRES_NOREFRESH.
# Refreshing is suppressed by default when a snapshot is in use; this can
# be overridden with --refresh/--no-refresh.  The 'reset' and 'diff'
# sub-commands are only valid with build target snapshots.
run_build_requires__process_args() {
    BUILD_REQUIRES_MODE=
    BUILD_REQUIRES_NOREFRESH=

    if [[ $OPT_SNAPSHOT ]]; then
        BUILD_REQUIRES_NOREFRESH=1
    fi
    # Terminates when arguments are exhausted ($1 then expands empty)
    while [[ "$1" ]]; do
        case "$1" in
            --refresh) shift
                BUILD_REQUIRES_NOREFRESH= ;;
            --no-refresh) shift
                BUILD_REQUIRES_NOREFRESH=1 ;;
            reset|--reset)
                if [[ $1 == --reset ]]; then
                    notice "The '--reset' option is deprecated. Use 'reset' (non-option) instead."
                fi
                shift
                if [[ ! $OPT_SNAPSHOT ]]; then
                    fatal "The 'reset' command is only effective with build target snapshots."
                fi
                BUILD_REQUIRES_MODE=reset ;;
            pull) shift
                BUILD_REQUIRES_MODE=pull ;;
            diff) shift
                if [[ ! $OPT_SNAPSHOT ]]; then
                    fatal "The 'diff' command is only available with build target snapshots."
                fi
                BUILD_REQUIRES_MODE=diff ;;
            *)
                # short_usage exits when given an argument
                short_usage quit
                ;;
        esac
    done
    if [[ ! $BUILD_REQUIRES_MODE ]]; then
        notice "Omitting 'pull' is deprecated. Pass 'pull' explicitly."
        BUILD_REQUIRES_MODE=pull
    fi
    # Pulling build requirements needs the spec file
    if [[ $BUILD_REQUIRES_MODE == pull ]]; then
        OPT_NEEDSPEC=1
    fi
}

# Execute the 'build-requires' command according to BUILD_REQUIRES_MODE:
# 'diff' shows package differences between snapshot and original target,
# 'reset' without a spec force-resets the snapshot, otherwise the build
# time dependencies from the spec are pulled into the (possibly reset)
# target.
run_build_requires() {
    local locked=
    # Cleanup runs from the RETURN trap; defined with a subshell body so
    # the INT/TERM/HUP trap set inside stays local to the cleanup
    run_build_requires_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        [[ $locked ]] && global_lock_release
    )
    trap 'run_build_requires_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    if [[ $BUILD_REQUIRES_MODE == diff ]]; then
        sdk-manage target package-diff "$OPT_TARGET" "$OPT_ORIGINAL_TARGET"
        return
    fi

    # Plain 'reset' without a spec: just reset the snapshot and be done
    if [[ $BUILD_REQUIRES_MODE == reset && ! $OPT_SPEC ]]; then
        locked=1
        global_lock_acquire
        if ! sdk-manage target snapshot --reset=force "$OPT_ORIGINAL_TARGET" "$OPT_TARGET"; then
            fatal "Failed to reset build target snapshot '$OPT_TARGET' to '$OPT_ORIGINAL_TARGET'"
        fi
        return
    fi

    local args=()
    if [[ $BUILD_REQUIRES_MODE == reset ]]; then
        args+=(--reset=force)
    else
        args+=(--reset=outdated)
    fi
    [[ $BUILD_REQUIRES_NOREFRESH ]] || args+=(--refresh)

    # Work on a flattened, normalized copy of the spec
    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"
    drop_windows_linefeeds "$TMP_SPEC"
    maybe_fix_package_version "$TMP_SPEC"

    OPT_PULL_BUILD_REQUIRES=1 maybe_verify_target_dependencies "$TMP_SPEC" "${args[@]}"
}

# Parse arguments of the 'apply' command.  The only recognized option is
# -R, which selects reverse (un-apply) mode via APPLY_REVERSE=1; anything
# else is an error.
run_apply__process_args() {
    APPLY_REVERSE=
    # Loop ends when arguments run out ($1 then expands empty)
    while [[ "${1:-}" ]]; do
        case "$1" in
            -R)
                APPLY_REVERSE=1
                shift
                ;;
            *)
                short_usage quit
                ;;
        esac
    done
}

# Execute the 'apply' command: apply (or, with -R, reverse) the patches
# listed in the spec file using plain 'patch'.  Handles both explicit
# %patchN application and %autosetup-applied patch series.
run_apply() {
    local message="Applying"
    local common_op=()
    if [[ $APPLY_REVERSE ]]; then
        message="Reversing"
        common_op+=(-R)
    fi

    # A CRLF spec implies CRLF patches - let 'patch' treat files as binary
    has_windows_linefeeds "$OPT_SPEC" && common_op+=(--binary)

    flatten_spec "$OPT_SPEC" > "$TMP_SPEC"
    drop_windows_linefeeds "$TMP_SPEC"

    # %autosetup without explicit %autopatch/%patchN lines applies every
    # declared PatchN source
    local to_apply=
    local auto_applied=$(grep -q "^%autosetup" "$TMP_SPEC" \
        && ! grep -q "^%autopatch" "$TMP_SPEC" \
        && ! grep -q "^%patch[0-9]" "$TMP_SPEC" \
        && echo 1)
    if [[ $auto_applied ]]; then
        # All declared patches, with the common -p level from %autosetup
        to_apply=$(sed -n 's/^Patch\([0-9]*\):.*/\1/p' "$TMP_SPEC")
        common_op+=($(sed -n 's/^%autosetup\( .*\)\? \(-p *[0-9]\+\).*/\2/p' "$TMP_SPEC"))
    else
        # Only the explicitly applied patches, in application order
        to_apply=$(sed -n 's/^%patch\([0-9]*\).*/\1/p' "$TMP_SPEC")
    fi

    # Reverse in the opposite order of application
    if [[ $APPLY_REVERSE ]]; then
        to_apply=$(tac <<<"$to_apply")
    fi

    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        for p in $to_apply; do
            if [[ ! $auto_applied ]]; then
                # Per-patch options from the %patchN line (e.g. -p1)
                op="$(sed -n "s/^%patch$p[\ \t]\+\(.*\)/\1/p" "$TMP_SPEC")"
            fi
            patch_file="$(sed -n "s/^Patch$p:[\ \t]\+\(.*\)/\1/p" "$TMP_SPEC")"
            echo "$message $patch_file" >&2
            # $op intentionally unquoted - it may carry multiple options
            patch "${common_op[@]}" $op < "$OPT_PKGDIR/$patch_file" >&2 \
                || fatal "$message $patch_file failed, exiting"
        done
    )
}

# Parse arguments of the 'build' command, setting the BUILD_* globals used
# by run_build.  Arguments after '--' (or after the first unrecognized
# argument) are passed through to rpmbuild via BUILD_EXTRA_ARGS.
run_build__process_args() {
    # Debug packages are disabled by default (cleared by -d)
    BUILD_DEBUG_ARGS=(--define "debug_package %{nil}")
    BUILD_JOBS=
    # %prep is skipped by default (cleared by -p)
    BUILD_NOPREP_ARGS=(--noprep)
    BUILD_NOCHECK_ARGS=()
    # rpmlint is suppressed when driven by the SDK frontend
    BUILD_NORPMLINT=${SAILFISH_SDK_FRONTEND:+1}
    BUILD_EXTRA_ARGS=()
    BUILD_SIGN=
    while [[ "$1" ]]; do
        case "$1" in
            -d|--enable-debug) shift
                BUILD_DEBUG_ARGS=() ;;
            -j*)
                # support giving -j with and without space between
                # it and the 'n'
                if [[ ${#1} -gt 2 ]]; then
                    BUILD_JOBS=${1:2}; shift
                else
                    [[ -z "$2" ]] && short_usage quit
                    BUILD_JOBS="$2"; shift 2;
                fi
                ;;
            -p|--prepare|--doprep)
                if [[ $1 == --doprep ]]; then
                    notice "The '--doprep' option is deprecated. Use '--prepare|-p' instead."
                fi
                shift
                BUILD_NOPREP_ARGS=()
                ;;
            -s|--sign) shift
                BUILD_SIGN=1
                ;;
            --no-check) shift
                BUILD_NOCHECK_ARGS=(--nocheck)
                BUILD_NORPMLINT=1
                ;;
            --) shift
                break
                ;;
            *)
                # The first non-option naming an existing path is taken as
                # the project file/dir (trailing slashes stripped via
                # extglob); any other argument ends the parsing
                if [[ ! $OPT_PRO_FILE_OR_DIR && -e $1 ]]; then
                    OPT_PRO_FILE_OR_DIR="${1%%+(/)}"
                    shift
                else
                    break
                fi
                ;;
        esac
    done
    BUILD_EXTRA_ARGS=("$@")
    handle_pro_file_or_dir_option

    if is_shadow_build && [[ ! $BUILD_NOPREP_ARGS ]]; then
        fatal "The '--prepare' option is not available with shadow builds." \
            "Use the 'prepare' command on the source tree instead."
    fi

    # Default to one job per available processor
    [[ $BUILD_JOBS ]] || BUILD_JOBS=$(getconf _NPROCESSORS_ONLN)

    [[ ! $BUILD_SIGN ]] || sign_rpms__prepare
}

# Execute the 'build' command: build binary RPMs from the spec with
# rpmbuild (under Scratchbox 2), then - as configured - sign the packages,
# prune older ones, run the 'post-package' hook and rpmlint.
run_build() {
    local extra_args=()

    maybe_increment_build_number "$OPT_SPEC"

    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    # Drop patch application macros when %prep is to be skipped
    if [[ $BUILD_NOPREP_ARGS ]]; then
        sed -i -e '/^%\(auto\)\?patch/d' "$TMP_SPEC"
    fi

    try_to_make_changelog >> "$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    maybe_verify_target_dependencies "$TMP_SPEC"

    # Build target triplet as reported by the cross compiler
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    # Shadow builds get build tool wrappers injected on PATH
    if is_shadow_build; then
        local subdir=$(get_spec_setup_subdir "$TMP_SPEC")
        : ${subdir:=.}
        local src_dir=$OPT_SRC_DIR/$subdir

        init_qmake_wrapper $WRAPPERSDIR/qmake
        init_cmake_wrapper $WRAPPERSDIR/cmake

        if [[ -e $src_dir/Makefile.am ]]; then
            init_autotools_wrappers "$subdir"
        fi
    fi

    # When %prep runs inside a git work tree, set up the machinery that
    # records applied patches and other %prep changes as git commits
    local patchset_id=
    if [[ ! $BUILD_NOPREP_ARGS && ! $OPT_NO_VCS_APPLY ]] && inside_git_worktree; then
        if [[ $(recent_patches --prune) ]]; then
            fatal "Refusing to prepare repeatedly - reset previous changes first."
        fi
        if [[ $(git_ status --porcelain --untracked-files=no) ]]; then
            fatal "Refusing to prepare dirty work tree - commit or reset your changes first."
        fi
        patchset_id=$(create_patchset_id)
        init_patch_wrapper "$WRAPPERSDIR/patch" "$patchset_id"
        init_commit_prep_changes_hook "$WRAPPERSDIR/$WE-commit-prep-changes" "$patchset_id"
        extra_args+=(--define "__spec_prep_post $WE-commit-prep-changes; %{___build_post}")
    fi

    [[ $BUILD_NOPREP_ARGS ]] || rm -f "$PREPARE_STAMP"

    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build env PATH="$WRAPPERSDIR:$USERWRAPPERSDIR:$PATH" rpmbuild \
            --build-in-place --target=$build_tgt \
            "${BUILD_DEBUG_ARGS[@]}" \
            --define "_smp_mflags -j$(rpm_quote "$BUILD_JOBS")" \
            --define "_rpmdir $OPT_OUTPUTDIR" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_rpmfilename %%{name}-%%{version}-%%{release}.%%{arch}.rpm" \
            --define "_buildhost $(get_hostname)" \
            --define "__patch patch" \
            "${EXTENDED_PRE_INSTALL_LOCK_ARGS[@]}" \
            --buildroot="$buildroot" \
            "${BUILD_NOPREP_ARGS[@]}" "${BUILD_NOCHECK_ARGS[@]}" \
            "${extra_args[@]}" \
            -bb \
            "$TMP_SPEC" \
            "${BUILD_EXTRA_ARGS[@]}"
    ) || return

    # Remember the applied patch set so 'scrape' knows what to pick up
    [[ $patchset_id ]] && printf '%s\n' "$patchset_id" >"$PREPARE_STAMP"

    [[ $OPT_PACKAGE_TIMELINE ]] || prune_older_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC"
    [[ ! $BUILD_SIGN ]] || sign_rpms "$TMP_SPEC" || return

    # ${rpms} intentionally unquoted - whitespace-separated file list
    local hook_exports=()
    local rpms=$(find_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC")
    run_hook post-package "${hook_exports[*]}" ${rpms} || fatal "The 'post-package' hook failed"

    if [[ ! $BUILD_NORPMLINT ]]; then
        local check_suites=$CHECK_RPMLINT
        run_check__process_args_helper "" "$check_suites" ""
        run_check --already-locked
    fi

    return 0
}

# Execute the 'prepare' command: run (only) the %prep section of the spec
# in place on the source tree, recording applied patches and other %prep
# changes as git commits when inside a git work tree (unless disabled with
# OPT_NO_VCS_APPLY).  Must not be used with shadow builds.
run_prepare() {
    local extra_args=()

    ! is_shadow_build || fatal "The 'prepare' command must be used on the source tree."

    maybe_increment_build_number "$OPT_SPEC"
    # TODO maybe add toggle here for disabling patch applying?
    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    maybe_verify_target_dependencies "$TMP_SPEC"

    # Build target triplet as reported by the cross compiler
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    # Inside a git work tree, set up the machinery that records applied
    # patches and other %prep changes as git commits (see run_build)
    local patchset_id=
    if [[ ! $OPT_NO_VCS_APPLY ]] && inside_git_worktree; then
        if [[ $(recent_patches --prune) ]]; then
            fatal "Refusing to prepare repeatedly - reset previous changes first."
        fi
        if [[ $(git_ status --porcelain --untracked-files=no) ]]; then
            fatal "Refusing to prepare dirty work tree - commit or reset your changes first."
        fi
        patchset_id=$(create_patchset_id)
        init_patch_wrapper "$WRAPPERSDIR/patch" "$patchset_id"
        init_commit_prep_changes_hook "$WRAPPERSDIR/$WE-commit-prep-changes" "$patchset_id"
        extra_args+=(--define "__spec_prep_post $WE-commit-prep-changes; %{___build_post}")
    fi

    rm -f "$PREPARE_STAMP"

    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        # BUILD_JOBS quoted with rpm_quote for consistency with run_build
        sb2_build env PATH="$WRAPPERSDIR:$USERWRAPPERSDIR:$PATH" rpmbuild \
            --build-in-place --target=$build_tgt \
            --define "_smp_mflags -j$(rpm_quote "$BUILD_JOBS")" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "__patch patch" \
            "${extra_args[@]}" \
            -bp --short-circuit \
            "$TMP_SPEC"
    ) || return

    # Remember the applied patch set so 'scrape' knows what to pick up
    [[ $patchset_id ]] && printf '%s\n' "$patchset_id" >"$PREPARE_STAMP"

    return 0
}

# Parse arguments of the 'scrape' command.  Sets SCRAPE_DRY_RUN,
# SCRAPE_KEEP, SCRAPE_STABLE and SCRAPE_OUTPUT_DIR (defaulting to the
# package directory).
run_scrape__process_args() {
    SCRAPE_DRY_RUN=
    SCRAPE_KEEP=
    SCRAPE_OUTPUT_DIR=$OPT_PKGDIR
    SCRAPE_STABLE=

    local opt=
    while [[ $# -gt 0 ]]; do
        opt=$1
        shift
        case $opt in
            -n|--dry-run)
                SCRAPE_DRY_RUN=1
                ;;
            --keep)
                SCRAPE_KEEP=1
                ;;
            -o|--output-dir)
                [[ ${1:-} ]] || fatal "Option expects argument: '$opt'"
                SCRAPE_OUTPUT_DIR=$1
                shift
                ;;
            --stable)
                SCRAPE_STABLE=1
                ;;
            *)
                fatal "Unexpected argument: '$opt'"
                ;;
        esac
    done
}

# Execute the 'scrape' command: turn the git commits recorded by the
# 'prepare'/'build' VCS integration back into *.patch files under
# SCRAPE_OUTPUT_DIR, replacing the patch files that exist there.  Honors
# SCRAPE_DRY_RUN, SCRAPE_KEEP, SCRAPE_STABLE and SCRAPE_OUTPUT_DIR set by
# run_scrape__process_args.
# Fixed: the "Cannot scrapees:" error messages were garbled.
run_scrape() {

    # Refuse to replace existing patches unless they are cleanly under
    # version control.  (NOTE(review): the 2>/dev/null after the [[ ]]
    # compound presumably silences cd/ls errors from the substitution.)
    if [[ $(cd "$SCRAPE_OUTPUT_DIR" && ls *.patch) ]] 2>/dev/null; then
        local status=
        if ! status=$(git_ status --porcelain -- "$SCRAPE_OUTPUT_DIR"/*.patch 2>/dev/null); then
            fatal "Cannot scrape: You have existing patches in output directory" \
                "that does not seem to be under version control."
        elif grep -e '^.[^ ]' --quiet <<<$status; then
            fatal "Cannot scrape: You have unstaged changes to the existing patches."
        fi
    fi

    local dirty=$(run_scrape__dirty_submodules)
    if [[ $dirty ]]; then
        fatal "The following submodules have uncommitted changes to files:" \
            $'\n\n'"$dirty" \
            $'\n\n'"Commit or reset changes to files in submodules first and try again." \
            "(Do not commit the changes to submodule's sha1 itself!)"
    fi

    # Commits still carrying the fake author identity cannot be exported
    local incomplete=$(run_scrape__summary --author="$FAKE_GIT_AUTHOR <$FAKE_GIT_EMAIL>")
    if [[ $incomplete ]]; then
        fatal "The following commits were created from plain patches and use fake author" \
            "information. Correct the authorship and try again:" $'\n\n'"$incomplete"
    fi

    if [[ $SCRAPE_DRY_RUN ]]; then
        run_scrape__summary
        return
    fi

    local tmpdir=
    # Cleanup runs from the RETURN trap; defined with a subshell body so
    # the INT/TERM/HUP trap set inside stays local to the cleanup
    run_scrape_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        if [[ $tmpdir ]]; then
            rm -rf "$tmpdir"
        fi
    )
    trap 'run_scrape_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    tmpdir=$(mktemp -d) || return

    run_scrape__format_patch "$tmpdir" || return

    local have_patches=
    if [[ $(cd "$tmpdir" && ls *.patch) ]] 2>/dev/null; then
        have_patches=1
        # Strip possible PREPARE_LEADER from filenames, enforce max filename length
        local leader_dashed=${PREPARE_LEADER//+([^[:alnum:]])/-}
        local patch=
        for patch in "$tmpdir"/*.patch; do
            patch=$(basename "$patch")
            local stripped=$(sed "s/^\([0-9]\+\)-\?${leader_dashed}-\?/\1-/" <<<$patch)
            if [[ ${#stripped} -gt $PATCH_FILENAME_MAX_LENGTH ]]; then
                # Truncate, preserving the '.patch' suffix (6 characters)
                stripped=${stripped:0:(PATCH_FILENAME_MAX_LENGTH-6)}.patch
            fi
            if [[ $stripped != "$patch" ]]; then
                mv --no-clobber "$tmpdir/$patch" "$tmpdir/$stripped" || return
            fi
        done

        # Strip PREPARE_LEADER from subject lines
        run_scrape__strip_leader_from_subject "$tmpdir"/*.patch

        # With --stable, restore original file names recorded in trailers
        if [[ $SCRAPE_STABLE ]]; then
            run_scrape__rename_patches "$tmpdir"/*.patch
        fi

        run_scrape__strip_trailer "$tmpdir"/*.patch
    fi

    mkdir -p "$SCRAPE_OUTPUT_DIR" || return
    find "$SCRAPE_OUTPUT_DIR" -maxdepth 1 -name '*.patch' -delete

    if [[ $have_patches ]]; then
        # List the resulting files relative to $PWD for the user
        local relative_output_dir=$(realpath --relative-to="$PWD" "$SCRAPE_OUTPUT_DIR")
        mv --target-directory "$SCRAPE_OUTPUT_DIR" "$tmpdir"/*.patch \
            && { ls -1 "$relative_output_dir"/*.patch ||:; } || return
    fi

    # Restore submodules to their recorded state unless asked to keep them
    if [[ ! $SCRAPE_KEEP ]]; then
        git_ submodule update --recursive || :
    fi
}

# Print (indented, one per line) the display paths of all submodules that
# have uncommitted changes to tracked files; prints nothing when all
# submodules are clean.  The helper is serialized into the foreach snippet
# with 'declare -f'.
run_scrape__dirty_submodules() (
    report_if_dirty() {
        local changes=$(git status --porcelain --ignore-submodules --untracked-files=no)
        if [[ $changes ]]; then
            printf ' %s\n' "$displaypath"
        fi
    }
    git_ submodule --quiet foreach --recursive "
        $(declare -f report_if_dirty)
        report_if_dirty
    "
)

# Print a summary of the commits that would be scraped: for each submodule
# with scrapeable commits a "<path> <old>..<new> (<count>):" header
# followed by one-line commit descriptions.  Extra arguments (e.g.
# --author=...) are passed to git-rev-list/git-log to filter the commits.
# Commits marked with the discard trailer are excluded.  ($sha1 and
# $displaypath are provided by 'git submodule foreach' in the snippet's
# environment.)
run_scrape__summary() (
    local rev_list_args=("$@")

    local regexp='^'$SCRAPE_TRAILER':[[:space:]]*'$SCRAPE_DISCARD'$'
    rev_list_args+=(--invert-grep --grep="$regexp")

    # Serialized into the foreach snippet with 'declare -f' below -
    # comments here are stripped on serialization
    summary() {
        local format=" > %h %s"
        local count=$(git rev-list "${rev_list_args[@]}" --count "$sha1..")
        [[ $count -eq 0 ]] && return
        printf "%s %s..%s (%d):\n" "$displaypath" \
            "$(git rev-parse --short "$sha1")" \
            "$(git rev-parse --short HEAD)" \
            "$count"
        git log "${rev_list_args[@]}" --format="$format" "$sha1.."
    }
    git_ submodule --quiet foreach --recursive "
        set -o nounset
        $(declare -p rev_list_args)
        $(declare -f summary)
        summary
    "
)

# Remove PREPARE_LEADER from the "Subject:" line of the given patch files,
# re-joining continuation lines and re-doing the word wrapping afterwards.
run_scrape__strip_leader_from_subject()
{
    local files=("$@")

    # NOTE(review): only '*' is escaped - assumes the leader contains no
    # other regex-special characters
    local leader_quoted=${PREPARE_LEADER//\*/\\*}

    # Drop the leader from the Subject and re-do word wrapping if any
    sed -i "${files[@]}" -e '
        /^Subject: / {
            s/ '"$leader_quoted"' */ /;

            # Leader not found, nothing to do here
            T;

            # Join continuation lines
            : next;
            N;
            s/\n\s\+/ /;
            t next;

            # Word wrap
            s/\(.\{,78\}\)\( \|$\)/\1\n\2/g;

            # Avoid extra blank line
            s/\n\n/\n/;

            P; D;
        }'
}

# Rename scraped patch files back to their originally recorded file names.
# Patches carrying our filename trailer (added by the patch wrapper when
# the patch matched a file under rpm/) are moved accordingly inside the
# caller's $tmpdir (dynamic scoping); others keep their generated names.
# Fixed: the last-trailer result was assigned to a dead variable
# ('trailed'), so multi-line trailer lists never reduced to the last entry
# and the subsequent match failed.
run_scrape__rename_patches() {
    local patches=("$@")

    local patch= trailer= filename=
    for patch in "${patches[@]}"; do
        trailer=$(git interpret-trailers --parse <"$patch" \
            |sed -n "s/^$SCRAPE_TRAILER:\s*//p")
        # Should someone include it in a patch by mistake, only the last
        # occurrence counts
        trailer=$(tail -n1 <<<$trailer)
        if [[ $trailer = "$SCRAPE_FILENAME="* ]]; then
            filename=${trailer#"$SCRAPE_FILENAME="}
            mv "$patch" "$tmpdir/$filename"
        fi
    done
}

# Strip our trailer - ensure to drop a possible preceding blank line
# but do not drop the blank line immediately following the "Subject:"
# header.
# TODO Use git-interpret-trailers once it knows to remove trailers
#
# (Inside the sed program below, the no-op substitution 's/\n [^\n]\+$/&/'
# exists only to set the flag tested by 't' - it matches as long as
# subject continuation lines follow.)
run_scrape__strip_trailer() {
    local patches=("$@")

    sed -i -e '
        # Read the whole subject, which may be line-wrapped, and the
        # following blank line at once and print it unchanged
        /^Subject: / {
            : repeat
            N
            s/\n [^\n]\+$/&/
            t repeat
            b
        }

        # Read our trailer and a possible preceding blank line at once and
        # drop it
        /^$/ N
        /\(^\|\n\)'"$SCRAPE_TRAILER"':/d
    ' -- "${patches[@]}"
}

# Generate patch files with git-format-patch into $1 for the scrapeable
# commits of each submodule (via 'git_ submodule foreach'); commits marked
# with the discard trailer are skipped.  Each invocation continues the
# numbering at the next multiple of 100 so patches from successive
# submodules sort in order.  ($sha1 is provided by 'git submodule
# foreach' in the snippet's environment.)
run_scrape__format_patch() (
    local tmp_output_dir=$1

    local regexp='^'$SCRAPE_TRAILER':[[:space:]]*'$SCRAPE_DISCARD'$'
    local rev_list_args=(--invert-grep --grep="$regexp")
    local format_patch_args=("${rev_list_args[@]}" "${OPT_GIT_FORMAT_PATCH_ARGS[@]}")

    # Serialized into the foreach snippet with 'declare -f' below -
    # comments here are stripped on serialization
    doit() {
        local sequence_file=$tmp_output_dir/.seq

        local sequence_number=$([[ -r "$sequence_file" ]] && cat "$sequence_file" || echo 0)
        printf '%s' "$((sequence_number + 1))" > "$sequence_file"

        local start_number=$((sequence_number * 100))
        # First sequence should start with 1 like the plain git-format-patch does
        [[ $start_number -gt 0 ]] || start_number=1

        # Allow extra filename length for the leader, which is stripped
        # from the names later
        git format-patch --quiet --output-directory "$tmp_output_dir" \
            --filename-max-length "$((PATCH_FILENAME_MAX_LENGTH + ${#PREPARE_LEADER}))" \
            --start-number "$start_number" "${format_patch_args[@]}" "$sha1"
    }
    git_ submodule --quiet foreach --recursive "
        set -o nounset
        $(declare -p PATCH_FILENAME_MAX_LENGTH PREPARE_LEADER)
        $(declare -p tmp_output_dir format_patch_args)
        $(declare -f doit)
        doit
    "
)

# Parse arguments of the 'cmake' command.
# Sets CMAKE_MODE (configure|build), CMAKE_EXTRA_CONFIGURE_ARGS,
# CMAKE_EXTRA_BUILD_ARGS, CMAKE_EXTRA_BUILD_TOOL_ARGS, CMAKE_BUILD_TARGET
# and possibly OPT_PRO_FILE_OR_DIR.
run_cmake__process_args() {
    CMAKE_MODE=
    CMAKE_EXTRA_CONFIGURE_ARGS=()
    CMAKE_EXTRA_BUILD_ARGS=()
    CMAKE_EXTRA_BUILD_TOOL_ARGS=()
    CMAKE_BUILD_TARGET=

    case $1 in
        --build)
            CMAKE_MODE=build

            [[ $2 ]] || fatal "Option expects argument: '--build'"
            [[ $2 == . ]] || fatal "The argument to '--build' must be '.' (literally)"
            shift 2

            # Arguments before '--' go to cmake --build, the rest to the
            # underlying build tool
            while [[ $# -gt 0 ]]; do
                case $1 in
                    --target)
                        [[ $2 ]] || fatal "Option expects argument: '--target'"
                        CMAKE_BUILD_TARGET=$2
                        shift
                        ;;
                    --)
                        shift
                        break
                        ;;
                    *)
                        CMAKE_EXTRA_BUILD_ARGS+=("$1")
                        ;;
                esac
                shift
            done
            CMAKE_EXTRA_BUILD_TOOL_ARGS=("$@")
            ;;
        *)
            CMAKE_MODE=configure

            # A leading existing path selects the project file/directory;
            # everything after it (or after '--') goes to cmake verbatim
            while [[ $# -gt 0 ]]; do
                case $1 in
                    --)
                        shift
                        break
                        ;;
                    *)
                        if [[ ! $OPT_PRO_FILE_OR_DIR && -e $1 ]]; then
                            # Strip trailing slashes (extglob pattern)
                            OPT_PRO_FILE_OR_DIR=${1%%+(/)}
                            shift
                        else
                            break
                        fi
                        ;;
                esac
            done

            CMAKE_EXTRA_CONFIGURE_ARGS=("$@")
            handle_pro_file_or_dir_option
            ;;
    esac
}

# Run CMake (configure or build phase per CMAKE_MODE) by executing the
# %build section of the spec inside the build target, with cmake/make
# replaced by wrapper scripts put first on PATH.
run_cmake() {
    # Target triplet reported by the cross compiler; the -gnueabi suffix
    # is stripped before passing it to rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    case $CMAKE_MODE in
        configure)
            # Configure only - 'make' becomes a no-op
            maybe_verify_target_dependencies "$TMP_SPEC"
            init_cmake_wrapper $WRAPPERSDIR/cmake
            init_noop_wrapper $WRAPPERSDIR/make
            ;;
        build)
            init_cmake_wrapper $WRAPPERSDIR/cmake
            init_simple_wrapper $WRAPPERSDIR/make "${CMAKE_EXTRA_BUILD_TOOL_ARGS[@]}" \
                ${CMAKE_BUILD_TARGET:+"$CMAKE_BUILD_TARGET"}
            ;;
        *)
            fatal "Internal error: Invalid value '$CMAKE_MODE' in CMAKE_MODE"
            ;;
    esac

    # Subshell so the possible cd does not affect the caller
    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build env PATH="${WRAPPERSDIR}:${USERWRAPPERSDIR}:${PATH}" rpmbuild \
            --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "__make make" \
            -bc --short-circuit \
            "$TMP_SPEC"
    )
}

# Parse arguments of the 'qmake' command.  A leading existing file or
# directory selects the project (.pro) file/dir; everything after it (or
# after '--') is forwarded to qmake verbatim.
# NOTE(review): the loop condition [[ "$1" ]] terminates on an empty
# argument and would trip 'set -u' with no arguments at all - assumed
# callers always pass at least one (possibly empty) argument; confirm.
run_qmake__process_args() {
    QMAKE_EXTRA_ARGS=()
    while [[ "$1" ]]; do
        case "$1" in
            --) shift
                break
                ;;
            *)
                # Consume the first existing path as the project file/dir
                if [[ ! $OPT_PRO_FILE_OR_DIR && -e $1 ]]; then
                    # Strip trailing slashes (extglob pattern)
                    OPT_PRO_FILE_OR_DIR="${1%%+(/)}"
                    shift
                else
                    break
                fi
                ;;
        esac
    done
    QMAKE_EXTRA_ARGS=("$@")
    handle_pro_file_or_dir_option
}

# Run qmake (configure step only) by executing the %build section of the
# spec inside the build target; 'make' is replaced with a no-op wrapper.
run_qmake() {
    # Target triplet minus the -gnueabi suffix, for rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    # This is a good time to verify the target dependencies as per mb
    maybe_verify_target_dependencies "$TMP_SPEC"

    init_qmake_wrapper $WRAPPERSDIR/qmake "${QMAKE_EXTRA_ARGS[@]}"
    init_noop_wrapper $WRAPPERSDIR/make

    # Subshell so the possible cd does not affect the caller
    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build env PATH="$WRAPPERSDIR:$USERWRAPPERSDIR:$PATH" rpmbuild \
            --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "__make make" \
            -bc --short-circuit \
            "$TMP_SPEC"
    )
}

# Generate a compilation database by running the %build stage with a
# wrapped 'make' that records compiler invocations into COMPILEDB_INPUT,
# then feeding the record to the 'compiledb' tool.
run_compiledb() {
    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    # This is a good time to verify the target dependencies as per mb
    maybe_verify_target_dependencies "$TMP_SPEC"

    # Target triplet minus the -gnueabi suffix, for rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    if is_shadow_build; then
        # Shadow build: also wrap qmake/cmake (and the autotools when the
        # sources use automake)
        local subdir=$(get_spec_setup_subdir "$TMP_SPEC")
        : ${subdir:=.}
        local src_dir=$OPT_SRC_DIR/$subdir

        init_qmake_wrapper $WRAPPERSDIR/qmake
        init_cmake_wrapper $WRAPPERSDIR/cmake

        if [[ -e $src_dir/Makefile.am ]]; then
            init_autotools_wrappers "$subdir"
        fi
    fi

    init_make_compiledb_wrapper $WRAPPERSDIR/make "$@"

    # Subshell so the possible cd does not affect the caller
    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build env PATH="$WRAPPERSDIR:$USERWRAPPERSDIR:$PATH" rpmbuild \
            --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "__make make" \
            -bc --short-circuit \
            "$TMP_SPEC" \
            || return

        # Convert the recorded invocations and drop the intermediate
        # record on success
        if [[ -e "$COMPILEDB_INPUT" ]]; then
            compiledb --parse "$COMPILEDB_INPUT" && rm -f "$COMPILEDB_INPUT"
        fi
    )
}

# Run 'make' with the given arguments by executing the %build stage;
# qmake is a no-op here and cmake runs in build mode.
run_make() {
    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    # Target triplet minus the -gnueabi suffix, for rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    init_noop_wrapper $WRAPPERSDIR/qmake
    # Presumably consumed by init_cmake_wrapper - TODO confirm
    CMAKE_MODE=build
    CMAKE_EXTRA_BUILD_TOOL_ARGS=("$@")
    init_cmake_wrapper $WRAPPERSDIR/cmake
    init_simple_wrapper $WRAPPERSDIR/make "$@"

    # Subshell so the possible cd does not affect the caller
    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build env PATH="$WRAPPERSDIR:$USERWRAPPERSDIR:$PATH" rpmbuild \
            --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "__make make" \
            -bc --short-circuit \
            "$TMP_SPEC"
    )
}

# Run the %install stage (and with --check also %check) in place,
# installing into $buildroot, and record the buildroot timestamp so the
# rsync deployment can verify the buildroot matches this build.
run_make_install() {
    # Internal, intentionally undocumented
    local maybe_nocheck=--nocheck
    if [[ ${1:-} == --check ]]; then
        local maybe_nocheck=
        shift
    fi

    # Target triplet minus the -gnueabi suffix, for rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    # Install to buildroot which should be rsync'ed to /opt/sdk/$package on device
    (
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build rpmbuild --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_buildhost $(get_hostname)" \
            --define "_skip_install_processing 1" \
            "${EXTENDED_PRE_INSTALL_LOCK_ARGS[@]}" \
            --buildroot="$buildroot" \
            -bi --short-circuit $maybe_nocheck \
            "$TMP_SPEC"
    ) || return

    mkdir -p "$buildroot" # hack for self-test to pass with stubbed sb2
    # Remember the buildroot mtime; 'deploy --rsync' compares it against
    # INSTALL_STAMP to detect a stale buildroot
    stat -c %Y "$buildroot" > "$INSTALL_STAMP"
}

# Parse arguments of the 'package' command.
# Sets PACKAGE_NOCHECK_ARGS (passed to rpmbuild) and PACKAGE_SIGN, and
# prepares for signing when requested.
run_package__process_args() {
    PACKAGE_NOCHECK_ARGS=()
    PACKAGE_SIGN=

    while [[ $# -ne 0 ]]; do
        case $1 in
            --no-check)
                PACKAGE_NOCHECK_ARGS=(--nocheck)
                ;;
            -s|--sign)
                # Takes no argument - the shared 'shift' at the loop
                # bottom is sufficient.  (An extra 'shift' here used to
                # silently consume the following argument.)
                PACKAGE_SIGN=1
                ;;
            *)
                fatal "Unexpected argument: $1"
                ;;
        esac
        shift
    done

    [[ ! $PACKAGE_SIGN ]] || sign_rpms__prepare
}

# Build the binary RPMs (-bb, skipping %prep/%build) from the already
# built tree, optionally prune older RPMs and sign, then run the
# 'post-package' hook with the resulting package files.
run_package() {
    # Target triplet minus the -gnueabi suffix, for rpmbuild --target
    local build_tgt=$(sb2_build gcc -dumpmachine)
    build_tgt=${build_tgt%-gnueabi}

    flatten_spec "$OPT_SPEC" >"$TMP_SPEC"

    try_to_make_changelog >> "$TMP_SPEC"

    drop_windows_linefeeds "$TMP_SPEC"

    maybe_fix_package_version "$TMP_SPEC"

    # Subshell so the possible cd does not affect the caller
    (
        maybe_increment_build_number "$TMP_SPEC"
        maybe_cd_to_spec_setup_subdir "$TMP_SPEC"
        sb2_build rpmbuild --build-in-place --target=$build_tgt \
            --define "noecho 1" \
            --define "_rpmdir $OPT_OUTPUTDIR" \
            --define "_sourcedir $OPT_PKGDIR" \
            --define "_rpmfilename %%{name}-%%{version}-%%{release}.%%{arch}.rpm" \
            --define "_buildhost $(get_hostname)" \
            "${EXTENDED_PRE_INSTALL_LOCK_ARGS[@]}" \
            --buildroot="$buildroot" \
            -bb --noprep --nobuildstage "${PACKAGE_NOCHECK_ARGS[@]}" --noclean \
            "$TMP_SPEC"
    ) || return

    [[ $OPT_PACKAGE_TIMELINE ]] || prune_older_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC"
    [[ ! $PACKAGE_SIGN ]] || sign_rpms "$TMP_SPEC"

    # hook_exports is currently always empty, passed as one (empty) word.
    # $rpms is intentionally unquoted - one argument per package path
    # (assumes no whitespace in the paths - TODO confirm)
    local hook_exports=()
    local rpms=$(find_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC")
    run_hook post-package "${hook_exports[*]}" ${rpms} || fatal "The 'post-package' hook failed"
}

# Execute a command (or an interactive shell) inside the build target.
# With --maintain the target is entered in maintenance mode and, when
# running inside the build engine, synced afterwards.
run_build_shell() {
    local maintain=

    # Options must precede the command; '--' or the first non-option
    # argument stops option processing (and is kept in "$@")
    while (($# > 0)); do
        if [[ $1 == --maintain ]]; then
            maintain=1
        elif [[ $1 == -- || $1 != -* ]]; then
            break
        else
            fatal "Unrecognized option: '$1'"
        fi
        shift
    done

    if [[ ! $maintain ]]; then
        sb2_build "$@"
        return
    fi

    local session_start=
    if inside_build_engine; then
        session_start=$(date +%s)
    fi

    sb2_maintain "$@" || return

    if inside_build_engine; then
        sdk-manage target sync "$OPT_TARGET" "$session_start"
    fi

    return 0
}

# Run the given command (or an interactive shell when none is given)
# inside the build target via Scratchbox 2.
sb2_build() {
    local sb2_args=()
    if [[ $OPT_BUILD_SHELL_ARGS ]]; then
        # Word-split the user supplied extra sb2 options intentionally
        read -a sb2_args <<<$OPT_BUILD_SHELL_ARGS || return
    fi

    if [[ $# -gt 0 ]]; then
        oomadvice sb2 -t $OPT_TARGET "${sb2_args[@]}" -- "$@"
    else
        # Keep PS1 aligned with `sdk-manage maintain`
        sb2 -t $OPT_TARGET "${sb2_args[@]}" -- env PS1="[$OPT_ORIGINAL_TARGET] \W $ " /bin/bash --noprofile --norc
    fi
}

# Run the given command (or an interactive shell when none is given)
# inside the build target in maintenance mode (sdk-install mapping, R/W).
sb2_maintain() {
    local sb2_args=(-m sdk-install -R)
    if [[ $OPT_BUILD_SHELL_MAINTENANCE_ARGS ]]; then
        # NOTE(review): this REPLACES the default (-m sdk-install -R)
        # rather than appending to it - confirm intended
        read -a sb2_args <<<$OPT_BUILD_SHELL_MAINTENANCE_ARGS || return
    fi

    if [[ $# -gt 0 ]]; then
        sb2 -t $OPT_TARGET "${sb2_args[@]}" -- "$@"
    else
        # Keep PS1 aligned with `sdk-manage maintain`
        sb2 -t $OPT_TARGET "${sb2_args[@]}" -- env PS1="[$OPT_ORIGINAL_TARGET] \W # " /bin/bash --noprofile --norc
    fi
}

# Deploy build results to the configured device using the method selected
# on the command line (--sdk/--pkcon/--zypper/--zypper-dup/--rsync/--manual).
run_deploy() {
    [[ $device_type ]] || fatal "No device specified"
    [[ -e $TMP_SPEC ]] || fatal "No previous build found"

    local method_opt= all_opt= debug_opt= patterns=() dry_run_opt=
    while [[ $# -ne 0 ]]; do
        case $1 in
            --sdk | --pkcon | --zypper | --zypper-dup | --rsync | --manual )
                [[ ! $method_opt ]] || fatal "Cannot combine '$1' and '$method_opt'"
                method_opt=$1
                ;;
            --all )
                all_opt=$1
                ;;
            --debug )
                debug_opt=1
                ;;
            -n | --dry-run )
                dry_run_opt=1
                ;;
            -- )
                shift
                patterns+=("$@")
                break
                ;;
            *)
                # First non-option argument - it and the rest are patterns
                patterns+=("$@")
                break
                ;;
        esac
        shift
    done

    # Exclude debug packages unless --debug was given (extglob pattern)
    if [[ ! $debug_opt ]]; then
        patterns+=('-*-debug@(info|source)')
    fi

    [[ ! $all_opt || $method_opt != --rsync ]] || fatal "Cannot use '--all' with '--rsync'"
    [[ ${method_opt:-} ]] || fatal "Argument expected"

    # Release the global lock on any exit path, including interruption
    run_deploy_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        global_lock_release
    )
    trap 'run_deploy_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    global_lock_acquire

    # rsync method is special - it copies the whole %buildroot produced
    # by 'make-install' instead of deploying RPM packages
    if [[ $method_opt == --rsync ]]; then
        local name=$(get_spec_tag "$TMP_SPEC" "%{name}")

        if [[ $dry_run_opt ]]; then
            echo "The RPM \"%buildroot\" directory would be copied to the device as \"/opt/sdk/$name\""
            return
        fi

        if [[ ! -d $buildroot || ! $(ls -A "$buildroot") ]]; then
            fatal "Nothing to deploy. Maybe you forgot to use the \"make-install\" command."
        fi

        # Compare the buildroot mtime against the stamp recorded by
        # run_make_install to detect a stale buildroot
        if [[ -e $INSTALL_STAMP && $(stat -c %Y "$buildroot") -ne $(<"$INSTALL_STAMP") ]]; then
            fatal "The content of the RPM \"%buildroot\" directory does not match this build."
        fi

        ensure_device_prepared

        rsync_as $device_username -av ${buildroot}/. $device_ip:/opt/sdk/$name \
            || { echo "Deploy failed"; return 1; }
        return
    fi

    local rpms=$(find_rpms $all_opt "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC" \
            "${patterns[@]}")
    [[ $rpms ]] || fatal "No package found or none matched the given criteria"
    # Package paths as seen on the device after rsync into ~/RPMS
    local device_rpms=$(sed "s,^$OPT_OUTPUTDIR/,RPMS/," <<<"$rpms")

    if [[ $dry_run_opt ]]; then
        echo "The following packages would be considered for deployment:"
        sort <<<"$rpms" |sed 's/^/  /'
        echo
        # The zypper methods implement dry-run on the device side
        if [[ $method_opt != --zypper* ]]; then
            return
        fi
    fi

    local remote_username=$device_username
    local method=()
    local ok_retcodes=(0)

    ensure_device_prepared

    case $method_opt in
        --sdk )
            method=(sdk-deploy-rpm $device_rpms)
            ;;
        --pkcon )
            method=(pkcon --plain --noninteractive install-local $device_rpms)
            ;;
        --zypper )
            remote_username=root
            method=(zypper --non-interactive in --force --allow-unsigned-rpm
                --details ${dry_run_opt:+--dry-run --download-only} $device_rpms)
            # 106 is zypper's informational "repos skipped" exit code
            ok_retcodes=(0 106)
            ;;
        --zypper-dup )
            remote_username=root
            method=(zypper --non-interactive --no-gpg-checks --plus-repo RPMS
                dup --from ~plus-repo-1 --details ${dry_run_opt:+--dry-run --download-only})
            ok_retcodes=(0 106)
            ;;
        --manual )
            ;;
        *)
            # Fixed: report the remembered method - '$1' no longer holds
            # it at this point
            fatal "Internal error: Invalid deployment method: '$method_opt'"
            ;;
    esac

    rsync_as "$remote_username" -av ${rpms} "$device_ip:RPMS/" || return

    local retcode=0

    if [[ $method_opt != --manual ]]; then
        ssh_as -t "$remote_username" "${method[@]}"
        # Succeed when the remote exit code is one of ok_retcodes
        set_contains ok_retcodes "$?"
        retcode=$?
        ssh_as "$remote_username" rm -f ${device_rpms}
    fi

    [[ $retcode -eq 0 ]] || { echo "Deploy failed"; return 1; }
}

# Parse arguments of the 'undeploy' command into the UNDEPLOY_* globals.
run_undeploy__process_args() {
    UNDEPLOY_ALL=
    UNDEPLOY_DRY_RUN=
    UNDEPLOY_METHOD_OPT=
    UNDEPLOY_PATTERNS=()

    [[ $device_type ]] || fatal "No device specified"

    while [[ $# -ne 0 ]]; do
        case $1 in
            --sdk | --pkcon | --rpm | --rsync | --zypper )
                if [[ $UNDEPLOY_METHOD_OPT ]]; then
                    fatal "Cannot combine '$1' and '$UNDEPLOY_METHOD_OPT'"
                fi
                UNDEPLOY_METHOD_OPT=$1
                ;;
            --all )
                UNDEPLOY_ALL=1
                ;;
            -n | --dry-run )
                UNDEPLOY_DRY_RUN=1
                ;;
            -- )
                shift
                UNDEPLOY_PATTERNS+=("$@")
                break
                ;;
            * )
                # Everything from the first non-option argument on is
                # treated as a pattern
                while [[ $# -ne 0 ]]; do
                    case $1 in
                        # Try to avoid bad surprises
                        -n | --dry-run )
                            fatal "Options must precede patterns"
                            ;;
                        * )
                            UNDEPLOY_PATTERNS+=("$1")
                            ;;
                    esac
                    shift
                done
                break
                ;;
        esac
        shift
    done

    [[ $UNDEPLOY_METHOD_OPT ]] || fatal "Argument expected"

    # Without --all a previous build is required - its spec determines
    # which packages are candidates for removal
    if [[ ! $UNDEPLOY_ALL ]]; then
        [[ -e $TMP_SPEC ]] || fatal "No previous build found"
        OPT_NEEDSPEC=1
    fi
}

# Remove previously deployed packages (or rsync'ed directories) from the
# device, using the method and filters parsed by run_undeploy__process_args.
run_undeploy() {
    # With --all no previous build is needed - run_undeploy__process_args
    # only requires the spec when filtering by this package, and TMP_SPEC
    # is only read below in the non-all branch.  (Previously this check
    # was unconditional, breaking '--all' without a previous build.)
    [[ $UNDEPLOY_ALL || -e $TMP_SPEC ]] || fatal "No previous build found"

    local user=
    case $UNDEPLOY_METHOD_OPT in
        --sdk | --pkcon | --rsync )
            user=$device_username
            ;;
        --rpm | --zypper )
            user=root
            ;;
        * )
            fatal "Internal error: Invalid undeploy method '$UNDEPLOY_METHOD_OPT'"
            ;;
    esac

    ensure_device_prepared

    local candidates=
    if [[ $UNDEPLOY_METHOD_OPT == --rsync ]]; then
        # Consider all directories under /opt/sdk
        candidates=$(ssh_as "$user" ls /opt/sdk)
        candidates=$(sed 's,.*,& /opt/sdk/&,' <<<"$candidates")
    else
        # Consider all packages with matching BUILDHOST
        local format='%{BUILDHOST} %{SOURCERPM} %{NAME} %{EVR}\n'
        candidates=$(ssh_as "$user" rpm -q --queryformat "$format" -a)
        candidates=$(
            [[ $candidates ]] || exit 0
            wanted_buildhost=$(get_hostname)
            while read buildhost rest; do
                [[ $buildhost == "$wanted_buildhost" ]] && printf '%s\n' "$rest"
            done <<<"$candidates"
        )
    fi

    # candidates now contains [{src_name, name, maybe_evr}], optionally filter
    # by src_name, discard src_name.
    if [[ $UNDEPLOY_ALL ]]; then
        candidates=$(cut -d' ' -f2- <<<"$candidates")
    else
        candidates=$(
            [[ $candidates ]] || exit 0
            local wanted_src_name=$(get_spec_tag "$TMP_SPEC" "%{name}")
            while read src_name rest; do
                [[ $src_name =~ ^"$wanted_src_name"(-[0-9].*)?$ ]] && printf '%s\n' "$rest"
            done <<<"$candidates"
        )
    fi

    # candidates now contains [{name, maybe_evr}], optionally filter by name
    if [[ ${#UNDEPLOY_PATTERNS[*]} -gt 0 ]]; then
        candidates=$(
            [[ $candidates ]] || exit 0
            while read name maybe_evr; do
                if match_multi "$name" "${UNDEPLOY_PATTERNS[@]}"; then
                    printf "%s %s\n" "$name" "$maybe_evr"
                fi
            done <<<"$candidates"
        )
    fi

    if [[ $UNDEPLOY_DRY_RUN ]]; then
        if [[ ! $candidates ]]; then
            echo "No candidate for removal found"
            return
        else
            echo "The following packages would be removed:"
            local name= evr=
            while read name evr; do
                printf '%s%s\n' "$name" "${evr:+-$evr}"
            done <<<"$candidates"
        fi
    fi

    # candidates now contains [{name, maybe_evr}], discard maybe_evr
    candidates=$(cut -d' ' -f1 <<<"$candidates")

    if [[ $UNDEPLOY_METHOD_OPT != --rsync ]]; then
        # Verify removability first (word splitting of $candidates intended)
        if ! ssh_as "$user" rpm --test -e $candidates; then
            return 1
        fi
    fi

    if [[ $UNDEPLOY_DRY_RUN ]]; then
        return 0
    fi

    case $UNDEPLOY_METHOD_OPT in
        --sdk )
            ssh_as -t "$user" sdk-deploy-rpm --undeploy $candidates
            ;;
        --pkcon )
            ssh_as -t "$user" pkcon --plain --noninteractive remove $candidates
            ;;
        --rpm )
            ssh_as -t "$user" rpm -e $candidates
            ;;
        --rsync )
            # Safety net: only remove top-level, non-hidden dirs under /opt/sdk
            local unsafe_to_rm_f=$(grep --invert-match '^/opt/sdk/[^.][^/]*$' <<<"$candidates")
            if [[ $unsafe_to_rm_f ]]; then
                fatal "Internal error: Refusing to remove '$unsafe_to_rm_f'"
            fi
            ssh_as "$user" rm -rf $candidates
            ;;
        --zypper )
            ssh_as -t "$user" zypper --non-interactive rm $candidates
            ;;
    esac
}

# Parse arguments of the 'check' command.
# Sets CHECK_LIST_SUITES and - via the helper - CHECK_LEVELS,
# CHECK_SUITES, CHECK_SOURCE_ARTEFACT and CHECK_PACKAGE_ARTEFACTS.
run_check__process_args() {
    CHECK_LIST_SUITES=
    check_levels=
    check_suites=
    check_artefacts=()
    while [[ "$1" ]]; do
        case "$1" in
            --list-suites) shift
                CHECK_LIST_SUITES=1
                ;;
            -l|--levels) shift
                [[ $1 ]] || short_usage quit
                check_levels=$1
                shift
                ;;
            -s|--suites) shift
                [[ $1 ]] || short_usage quit
                check_suites=$1
                shift
                ;;
            --) shift
                check_artefacts+=(${@:+"$@"})
                break
                ;;
            -*)
                short_usage quit
                ;;
            *)
                check_artefacts+=("$1")
                shift
                ;;
        esac
    done

    if [[ ! $CHECK_LIST_SUITES ]]; then
        # Merge the configured defaults with the command line values
        check_levels=$(combine_set_update_recipes "$OPT_CHECK_LEVELS" "$check_levels")
        check_suites=$(combine_set_update_recipes "$OPT_CHECK_SUITES" "$check_suites")

        run_check__process_args_helper "$check_levels" "$check_suites" \
            ${check_artefacts:+"${check_artefacts[@]}"}

        notice "Testing on levels: ${CHECK_LEVELS[*]}"
        notice "Testing with suites: ${CHECK_SUITES[*]}"

        # A spec is needed when packages must be located implicitly or
        # when the unit level (%check) is selected
        if set_contains CHECK_LEVELS "$CHECK_LEVEL_PACKAGE" && [[ ${#CHECK_PACKAGE_ARTEFACTS[*]} -eq 0 ]] \
            || set_contains CHECK_LEVELS "$CHECK_LEVEL_UNIT"; then
            OPT_NEEDSPEC=1
        fi
    fi
}

# Classify the given artefacts and resolve the selected test levels and
# suites.
# Arguments: $1 - comma separated levels recipe (items may be prefixed
#                 with '+'/'-' to add/remove from the defaults),
#            $2 - comma separated suites recipe,
#            $3… - artefacts (RPM files and/or one source tree)
# Sets CHECK_SOURCE_ARTEFACT, CHECK_PACKAGE_ARTEFACTS, CHECK_LEVELS,
# CHECK_SUITES and RPMVALIDATION_ALL_SUITES.
run_check__process_args_helper() {
    local check_levels=$1
    local check_suites=$2
    local check_artefacts=("${@:3}")

    CHECK_SOURCE_ARTEFACT=
    CHECK_PACKAGE_ARTEFACTS=()
    for artefact in ${check_artefacts:+"${check_artefacts[@]}"}; do
        if [[ ! -e $artefact ]]; then
            fatal "No such file or directory: '$artefact'"
        fi
        if [[ $artefact == *.rpm && -f $artefact ]]; then
            CHECK_PACKAGE_ARTEFACTS+=("$artefact")
        elif [[ -d $artefact ]]; then
            if [[ $CHECK_SOURCE_ARTEFACT ]]; then
                fatal "Multiple artefacts of type 'source tree' specified"
            fi
            CHECK_SOURCE_ARTEFACT=$artefact
        else
            fatal "Unhandled type of artefact '$artefact'"
        fi
    done

    # Default levels follow from the artefact types given
    if [[ $CHECK_SOURCE_ARTEFACT && ${#CHECK_PACKAGE_ARTEFACTS[*]} -eq 0 ]]; then
        CHECK_LEVELS=($CHECK_LEVEL_SOURCE)
    elif [[ ! $CHECK_SOURCE_ARTEFACT && ${#CHECK_PACKAGE_ARTEFACTS[*]} -ne 0 ]]; then
        CHECK_LEVELS=($CHECK_LEVEL_PACKAGE)
    else
        CHECK_LEVELS=($CHECK_LEVEL_SOURCE $CHECK_LEVEL_PACKAGE)
    fi
    if [[ $check_levels ]]; then
        # Expand the aggregate levels (static/dynamic) into their
        # concrete levels, preserving any '+'/'-' prefix
        local expanded_check_levels=() level=
        for level in ${check_levels//,/ }; do
            local op=
            if [[ $level = [-+]* ]]; then
                op=${level:0:1}
                level=${level:1}
            fi
            case $level in
                $CHECK_LEVEL_STATIC)
                    expanded_check_levels+=($op$CHECK_LEVEL_SOURCE $op$CHECK_LEVEL_PACKAGE) ;;
                $CHECK_LEVEL_DYNAMIC)
                    expanded_check_levels+=($op$CHECK_LEVEL_UNIT $op$CHECK_LEVEL_SYSTEM) ;;
                $CHECK_LEVEL_SOURCE|$CHECK_LEVEL_PACKAGE|$CHECK_LEVEL_UNIT|$CHECK_LEVEL_SYSTEM)
                    expanded_check_levels+=($op$level) ;;
                *)
                    fatal "Not a recognized test level '$level'" ;;
            esac
        done
        set_update CHECK_LEVELS ${expanded_check_levels[*]}
    fi
    if [[ ${#CHECK_LEVELS[*]} -eq 0 ]]; then
        fatal "No test level selected"
    fi

    # Query the available rpmvalidation suites; column 1 is the name,
    # column 2 tells whether the suite is essential
    local rpmvalidation_suites_info=$(rpmvalidation --target "$OPT_TARGET" --list-suites)
    RPMVALIDATION_ALL_SUITES=($(awk '{print $1}' <<<"$rpmvalidation_suites_info"))
    local rpmvalidation_essential_suites=($(awk -v essential="$CHECK_ESSENTIAL" \
        '($2 == essential) { print $1 }' <<<"$rpmvalidation_suites_info"))
    local known_suites=($CHECK_RPMSPEC $CHECK_RPMLINT ${RPMVALIDATION_ALL_SUITES[*]})

    # Default suites: the built-in ones plus the essential rpmvalidation
    # suites, updated with the user's recipe if any
    CHECK_SUITES=($CHECK_RPMSPEC $CHECK_RPMLINT ${rpmvalidation_essential_suites[*]})
    if [[ $check_suites ]]; then
        for suite in ${check_suites//[,+-]/ }; do
            if ! set_contains known_suites "$suite"; then
                fatal "Not a known test suite '$suite'"
            fi
        done
        set_update CHECK_SUITES ${check_suites//,/ }
    fi
    if [[ ${#CHECK_SUITES[*]} -eq 0 ]]; then
        fatal "No test suite selected"
    fi
}

# Run the selected test suites on the selected test levels, or list the
# available suites with --list-suites.
run_check() {
    local no_lock=
    if [[ $1 == --already-locked ]]; then
        no_lock=1
        shift
    fi
    if [[ $CHECK_LIST_SUITES ]]; then
        # Build a table of suites using the ASCII unit separator as the
        # field separator, so fields may contain spaces
        local US=$'\x1F'
        print_suite() ( IFS=$US; printf '%s\n' "$*"; )
        local raw=$(
            print_suite NAME ESSENTIAL LEVELS URL DESCRIPTION
            {
                print_suite "$CHECK_RPMSPEC" "$CHECK_ESSENTIAL" "$CHECK_LEVEL_UNIT" "-" \
                    "Execute the %check section of the RPM SPEC file"

                print_suite "$CHECK_RPMLINT" "$CHECK_ESSENTIAL" "$CHECK_LEVEL_PACKAGE" "-" \
                    "Check for common packaging problems with rpmlint"

                local name= essential= url= description=
                rpmvalidation --target "$OPT_TARGET" --list-suites \
                    |while read name essential url description; do
                        print_suite "$name" "$essential" "$CHECK_LEVEL_PACKAGE" "$url" "$description"
                    done
            } |sort --field-separator "$US" --key 1,1
        )

        # Interleave the aligned table rows (fields 1-4) with the
        # indented description lines (field 5); '2~2' is GNU sed step
        # addressing, '2d' drops the header's description line
        local odd=$(cut -d "$US" -f 1-4 <<<"$raw" |column --table --separator "$US")
        local even=$(cut -d "$US" -f 5 <<<"$raw")
        paste -d $'\n' <(cat <<<"$odd") <(cat <<<"$even") |sed '2~2s/^/    /; 2d'
        return
    fi

    # Release the global lock on any exit path, including interruption,
    # unless the caller already holds it
    run_check_cleanup() (
        trap 'echo cleaning up...' INT TERM HUP
        [[ $no_lock ]] || global_lock_release
    )
    trap 'run_check_cleanup; trap - RETURN' RETURN
    trap 'return 1' INT TERM HUP

    [[ $no_lock ]] || global_lock_acquire
    local EXTENDED_PRE_INSTALL_LOCK_ARGS=()

    local rc=0 done_count=0

    # Unit level: run the %check section via 'make-install --check'
    if set_contains CHECK_LEVELS "$CHECK_LEVEL_UNIT"; then
        if set_contains CHECK_SUITES "$CHECK_RPMSPEC"; then
            let done_count++
            if [[ -d $STATEDIR ]]; then
                run_make_install --check || rc=1
            else
                notice "The \"$CHECK_RPMSPEC\" suite needs to be run from the top of a build tree"
                rc=1
            fi
        fi
    fi

    # Package level: rpmvalidation and/or rpmlint on the package files
    if set_contains CHECK_LEVELS "$CHECK_LEVEL_PACKAGE"; then
        local selected_rpmvalidation_suites=()
        set_intersect selected_rpmvalidation_suites RPMVALIDATION_ALL_SUITES CHECK_SUITES
        selected_rpmvalidation_suites=$(IFS=,; printf '%s' "${selected_rpmvalidation_suites[*]}")

        local have_selected_suites=
        if [[ $selected_rpmvalidation_suites ]] \
                || set_contains CHECK_SUITES "$CHECK_RPMLINT"; then
            have_selected_suites=1
        fi

        # Determine the packages to check - either given explicitly or
        # found from the previous build (debug packages excluded)
        local packages=
        if [[ $have_selected_suites ]]; then
            packages=(${CHECK_PACKAGE_ARTEFACTS:+"${CHECK_PACKAGE_ARTEFACTS[@]}"})
            if [[ ${#packages[*]} -eq 0 ]]; then
                if [[ ! -e $TMP_SPEC ]]; then
                    notice "No previous build found under the current working directory" \
                        "and no package to verify was specified explicitly"
                else
                    packages=($(find_rpms "$OPT_OUTPUTDIR" "$OPT_PKGDIR" "$TMP_SPEC" \
                        '-*-debug@(source|info)'))
                    if [[ ${#packages[*]} -eq 0 ]]; then
                        notice "No packages found and none specified explicitly"
                    fi
                fi
            fi
        fi

        local package=

        if [[ $selected_rpmvalidation_suites ]]; then
            for package in "${packages[@]}"; do
                let done_count++
                rpmvalidation -t "$OPT_TARGET" --suites "$selected_rpmvalidation_suites" "$package" \
                    || rc=1
            done
        fi

        if set_contains CHECK_SUITES "$CHECK_RPMLINT"; then
            for package in "${packages[@]}"; do
                let done_count++
                sb2 -t "$OPT_TARGET" -m sdk-build+pp rpmlint "$package" || rc=1
            done
        fi
    fi

    # The other levels are currently unimplemented

    if [[ $done_count -eq 0 ]]; then
        fatal "None of the selected test suites operate on the selected test levels. Nothing to do."
    fi

    return $rc
}

# Print the configured shared directory path on stdout
get_shared_dir() {
    printf '%s\n' "$OPT_SHARED_DIR"
}

# True when running inside the SDK build engine VM (marker file present)
inside_build_engine() {
    [[ -f /etc/mer-sdk-vbox ]]
}
# True when the current directory is inside a git work tree
inside_git_worktree() {
    git_ log -1 HEAD &>/dev/null
}

# Frontend detection based on $SAILFISH_SDK_FRONTEND; only ever true
# inside the build engine.
behind_qtcreator() {
    # Qt Creator is assumed when no frontend is set (backward compatibility)
    if ! inside_build_engine; then
        return 1
    fi
    [[ ! $SAILFISH_SDK_FRONTEND || $SAILFISH_SDK_FRONTEND == qtcreator ]]
}
behind_sfdk() {
    inside_build_engine || return
    [[ $SAILFISH_SDK_FRONTEND == sfdk ]]
}

# Symlink creation under VirtualBox shared folder shared from Windows requires
# special permissions which are not always granted - inside the build engine
# fall back to storing the target path in a plain file when ln fails.
maybe_symlink_create() {
    local link_target=$1 link_path=$2

    if ! inside_build_engine; then
        ln -sfn "$link_target" "$link_path"
        return
    fi

    # Best effort: silently degrade to a regular file holding the target
    if ! ln -sfn "$link_target" "$link_path" &>/dev/null; then
        printf '%s' "$link_target" > "$link_path"
    fi
}

# Read back a path stored by maybe_symlink_create: resolve the symlink,
# or print the content of the fallback regular file.
maybe_symlink_read() {
    local stored_path=$1

    if [[ ! -L $stored_path ]]; then
        cat "$stored_path"
        return
    fi

    readlink -f "$stored_path"
}

# this is same as basename $0
ME=${0/#*\//}

# this is what is closer to the user - behind the build engine the user
# interacts through the 'sfdk' frontend (except during self-tests)
if inside_build_engine && [[ $1 != --self-test && ! $MB2_SELF_TEST_RUN ]]; then
    WE=sfdk
else
    WE=mb2
fi

# Built-in defaults
# Task recognition pattern: e.g. a branch like "ab123…" (2-5 letters
# followed by 3+ digits)
DEF_TASK='git:^[[:alpha:]]{2,5}[[:digit:]]{3,}'
DEF_SNAPSHOT=default
DEF_SNAPSHOT_POOL_SIZE=5
DEF_GIT_FORMAT_PATCH_ARGS=(--no-numbered --zero-commit --no-signature --full-index)

# option variables
OPT_SPEC=
OPT_TARGET=
OPT_ORIGINAL_TARGET=
OPT_SNAPSHOT=$DEF_SNAPSHOT
OPT_NO_SNAPSHOT=
OPT_DEVICE=
OPT_OUTPUTDIR=$(readlink -f ./RPMS)
OPT_PACKAGE_TIMELINE=
OPT_SEARCH_OUTPUTDIR=
OPT_SEARCH_OUTPUTDIR_VERBOSITY=verbose
OPT_OUTPUT_PREFIX=
OPT_PKGDIR=$PWD/rpm
OPT_PRO_FILE_OR_DIR=
OPT_SRC_DIR=
OPT_NEEDSPEC=
OPT_NEEDTARGET=
OPT_FIX_VERSION=
OPT_FIX_VERSION_HINT=
OPT_NO_FIX_VERSION=
OPT_CHANGELOG=
OPT_CHANGELOG_ARGS=
OPT_SHARED_DIR=/etc/mersdk/share
OPT_INC_BUILD_NUMBER=0
OPT_PULL_BUILD_REQUIRES=1
OPT_WRAP=()
OPT_BUILD_SHELL_ARGS=
OPT_BUILD_SHELL_MAINTENANCE_ARGS=
OPT_CHECK_LEVELS=
OPT_PACKAGE_SIGN_USER=
OPT_PACKAGE_SIGN_PASSPHRASE=
OPT_PACKAGE_SIGN_PASSPHRASE_FILE=
OPT_CHECK_SUITES=
OPT_HOOKS_DIR=
OPT_NO_VCS_APPLY=
OPT_GIT_FORMAT_PATCH_ARGS=("${DEF_GIT_FORMAT_PATCH_ARGS[@]}")

# Per-build-tree state files, kept under .mb2 or .sfdk (see WE above)
STATEDIR=$PWD/.$WE
TMP_SPEC=$STATEDIR/spec
COMPILEDB_INPUT=$STATEDIR/compiledb-input.txt
PREPARE_STAMP=$STATEDIR/prepared
INSTALL_STAMP=$STATEDIR/installed

SDK_RPMSIGN=/usr/libexec/sailfish-sdk-setup/sdk-rpmsign

# Test levels understood by the 'check' command; static and dynamic are
# aggregates expanded by run_check__process_args_helper
CHECK_LEVEL_SOURCE=source
CHECK_LEVEL_PACKAGE=package
CHECK_LEVEL_STATIC=static
CHECK_LEVEL_UNIT=unit
CHECK_LEVEL_SYSTEM=system
CHECK_LEVEL_DYNAMIC=dynamic

CHECK_ESSENTIAL=Essential
CHECK_OPTIONAL=Optional

# Built-in test suites (others come from rpmvalidation --list-suites)
CHECK_RPMSPEC=rpmspec
CHECK_RPMLINT=rpmlint

AUTOTOOLS=(autoreconf autoconf autoheader aclocal automake autopoint libtoolize)

# The working directory inside build engine is noexec and symlink creation under VirtualBox
# shared folder shared from Windows requires special permissions
if inside_build_engine; then
    WRAPPERSDIR=$HOME/.mb2/wrappers${PWD#"$SAILFISH_SDK_SRC1_MOUNT_POINT"}
    USERWRAPPERSDIR=$HOME/.mb2/user-wrappers${PWD#"$SAILFISH_SDK_SRC1_MOUNT_POINT"}
else
    WRAPPERSDIR=$STATEDIR/wrappers
    USERWRAPPERSDIR=$STATEDIR/user-wrappers
fi

# Virtualbox environment will install in this hardcoded location
if inside_build_engine; then
    # Intersperse with quotes to avoid /usr/lib/rpm/check-buildroot to complain
    # when building sdk-setup under build engine
    buildroot=/home/''deploy/installroot
else
    buildroot="$PWD/installroot"
fi

# Identity used for commits created internally by the tool
FAKE_GIT_AUTHOR=$WE
FAKE_GIT_EMAIL=$WE

# Markers used by the prepare/scrape machinery
PREPARE_LEADER="*$WE-prepare* "
SCRAPE_TRAILER="${WE^}-scrape"
SCRAPE_DISCARD=discard
SCRAPE_FILENAME=filename
PATCHSET_ID_REF=$WE/patchset
# Git's default. Note that it claims to be 64 in the man page but there is likely a bug causing it
# to count also the terminating '\0' or so, because git-format-patch produces 63 chars long names.
PATCH_FILENAME_MAX_LENGTH=63
NULL_ID=0000000000000000000000000000000000000000

################################################################################
if [[ $1 != --self-test ]]; then  ###  M A I N  EXECUTION BEGINS HERE  #########
################################################################################

# The project-dir-or-file command line option serves just one purpose: to locate the source tree
# when doing a shadow build.  Including the file name is optional and takes no effect. If multiple
# .pro files exist in the directory, the selection is done inside the .spec file, mentioning one on
# qmake command line.
#
# An exception applies if there is no .spec file. In this case the qmake and make commands behave as
# plain qmake/make invocations, just wrapped with sb2, and the project-dir-or-file option is
# forwarded to qmake without modifications.
#
# If this is called with an empty argument, the default settings is restored.
# Locate the source tree for shadow builds based on the project-dir-or-file
# option (see the long comment above). Records the location as
# $STATEDIR/src; an empty argument just restores the default (no record).
handle_pro_file_or_dir_option() {
    # Forget any previously recorded source location first
    rm -f "$STATEDIR/src"

    if [[ ! $OPT_PRO_FILE_OR_DIR ]]; then
        return
    fi

    # Normalize to the directory holding the project file (a directory
    # argument is used as-is)
    local pro_dir=
    if [[ -d $OPT_PRO_FILE_OR_DIR ]]; then
        pro_dir=$(readlink -f "$OPT_PRO_FILE_OR_DIR")
    else
        pro_dir=$(dirname "$(readlink -f "$OPT_PRO_FILE_OR_DIR")")
    fi

    # Walk upwards from pro_dir looking for an "rpm" subdirectory that
    # directly contains a .spec or .yaml - that ancestor is taken as the top
    # of the source tree
    local src_dir=
    local dir=$pro_dir
    while true; do
        if [[ -d $dir/rpm && $(find "$dir/rpm" -maxdepth 1 -name '*.spec' -o -name '*.yaml') ]]; then
            src_dir=$dir
            break
        fi
        # Skip any "rpm" ancestor in path to avoid misinterpreting it as THE rpm directory
        while [[ $(basename "$dir") == rpm ]]; do
            dir=$(dirname "$dir")
        done
        dir=$(dirname "$dir")
        # For in-place builds do not search above the CWD, for shadow builds do not search above
        # common prefix
        if [[ $(readlink -f "$PWD") == "$dir"/* ]]; then
            break
        fi
    done

    # Silently fall back to the directory where the project file resides - some commands can be used
    # without spec file
    : ${src_dir:=$pro_dir}

    # The project file/directory might be specified explicitly for non-shadow builds as well
    if [[ $src_dir != "$(readlink -f "$PWD")" ]]; then
        maybe_symlink_create "$src_dir" "$STATEDIR/src"
    fi
}

# Restore shadow-build source/package locations previously recorded under
# $STATEDIR/src by handle_pro_file_or_dir_option(). No-op otherwise.
maybe_restore_shadow_build() {
    [[ -e $STATEDIR/src ]] || return 0
    OPT_SRC_DIR=$(maybe_symlink_read "$STATEDIR/src")
    OPT_PKGDIR=$OPT_SRC_DIR/rpm
}

# A shadow build is in effect when a source location has been recorded
is_shadow_build() { [[ -e $STATEDIR/src ]]; }

# Parse global options up to (and including) the command word. The command
# word terminates the loop via 'break' after flagging whether it needs a
# spec file and/or a build target. Note: '${1%%+(/)}' strips trailing
# slashes (extglob, enabled at the top of the file).
while [[ "$1" ]]; do
    # only set variables in this loop
    case "$1" in
        "-t" | "--target") shift
            OPT_TARGET="$1"; shift
            ;;
        "-f" | "--shared-folder") shift
            OPT_SHARED_DIR="${1%%+(/)}"; shift
            ;;
        "-d" | "--device") shift
            OPT_DEVICE="$1"; shift
            ;;
        "-o" | "--outputdir" | "--output-dir")
            if [[ $1 == --outputdir ]]; then
                notice "The '$1' option is deprecated. Use '--output-dir' instead."
            fi
            shift
            [[ -d "$1" ]] || fatal "'$1' is not a directory"
            OPT_OUTPUTDIR="${1%%+(/)}"; shift
            # --output-dir and --output-prefix are mutually exclusive
            OPT_OUTPUT_PREFIX=
            OPT_SEARCH_OUTPUTDIR=1
            ;;
        "-O" | "--output-prefix") shift
            [[ -d "$1" ]] || fatal "'$1' is not a directory"
            OPT_OUTPUT_PREFIX="${1%%+(/)}"; shift
            OPT_OUTPUTDIR=
            OPT_SEARCH_OUTPUTDIR=1
            ;;
        "-p" | "--projectdir")
            notice "The '$1' option is deprecated. Try '$ME --help' for more information."
            [[ -d "$2" ]] || fatal "'$2' is not a directory"
            OPT_PRO_FILE_OR_DIR="${2%%+(/)}"
            shift 2
            ;;
        "--search-output-dir") shift
            OPT_SEARCH_OUTPUTDIR=1
            ;;
        "--search-output-dir"=* )
            OPT_SEARCH_OUTPUTDIR=1
            OPT_SEARCH_OUTPUTDIR_VERBOSITY=${1#*=}
            if [[ $OPT_SEARCH_OUTPUTDIR_VERBOSITY != verbose &&
                    $OPT_SEARCH_OUTPUTDIR_VERBOSITY != quiet ]]; then
                fatal "Unexpected argument to '--search-output-dir': '$OPT_SEARCH_OUTPUTDIR_VERBOSITY'"
            fi
            shift
            ;;
        "--no-search-output-dir") shift
            OPT_SEARCH_OUTPUTDIR=
            ;;
        "-s" | "--specfile" ) shift
            OPT_SPEC="$1"; shift
            ;;
        "-S" | "--snapshot" ) shift
            # TODO Accept snapshot name as a separate argument
            # Omitting '--' in option name for greater clarity to sfdk users
            notice "Use of the 'snapshot' option without argument is deprecated. Acting as if '%pool' was the argument."
            OPT_SNAPSHOT=%pool
            ;;
        "-S"=* | "--snapshot"=* )
            OPT_SNAPSHOT=${1#*=}
            [[ $OPT_SNAPSHOT ]] || fatal "The '--snapshot' option requires an argument"
            shift
            ;;
        "--no-snapshot" ) shift
            OPT_SNAPSHOT=
            OPT_NO_SNAPSHOT=1
            ;;
        "--no-snapshot"=* )
            OPT_SNAPSHOT=
            OPT_NO_SNAPSHOT=${1#*=}
            if [[ $OPT_NO_SNAPSHOT != force ]]; then
                fatal "Unexpected argument to '--no-snapshot': '$OPT_NO_SNAPSHOT'"
            fi
            shift
            ;;
        "-T" | "--task" ) shift
            OPT_TASK=$DEF_TASK
            ;;
        "-T"=* | "--task"=* )
            OPT_TASK=${1#*=}
            shift
            ;;
        "--no-task" ) shift
            OPT_TASK=
            ;;
        "--wrap" )
            # Argument must look like '<name>:<command>'
            # FIX: "options requires" -> "option requires" (typo in message)
            [[ $2 == *?:?* ]] || fatal "The '$1' option requires an argument"
            OPT_WRAP+=("$2")
            shift 2
            ;;
        "-i" | "--increment" ) shift
            OPT_INC_BUILD_NUMBER=1
            ;;
        "-P" | "--pedantic" )
            notice "The '$1' option is deprecated and does nothing currently"
            shift
            ;;
        "-x" | "--fix-version" ) shift
            OPT_FIX_VERSION=1
            OPT_FIX_VERSION_HINT=
            OPT_NO_FIX_VERSION=
            ;;
        "-x="* | "--fix-version="* )
            OPT_FIX_VERSION=1
            OPT_FIX_VERSION_HINT=${1#*=}
            OPT_NO_FIX_VERSION=
            shift
            ;;
        "-X" | "--no-fix-version" ) shift
            OPT_FIX_VERSION=
            OPT_FIX_VERSION_HINT=
            OPT_NO_FIX_VERSION=1
            ;;
        "-n" | "--no-deps" | "--no-pull-build-requires" )
            # FIX: check the deprecated spelling BEFORE shifting - previously
            # '$1' was tested after 'shift', so the notice never fired
            if [[ $1 == --no-deps ]]; then
                notice "The '$1' option is deprecated. Use '--no-pull-build-requires|-n' instead."
            fi
            shift
            OPT_PULL_BUILD_REQUIRES=
            ;;
        "-c" | "--changelog" | "--git-change-log" )
            # FIX: same shift-order issue as with --no-deps above
            if [[ $1 == --git-change-log ]]; then
                notice "The '$1' option is deprecated. Use '--changelog|-c' instead."
            fi
            shift
            OPT_CHANGELOG=1
            ;;
        "-c="* | "--changelog="* | "--git-change-log="* )
            if [[ $1 == --git-change-log=* ]]; then
                notice "The '$1' option is deprecated. Use '--changelog|-c' instead."
            fi
            OPT_CHANGELOG=1
            OPT_CHANGELOG_ARGS="${1#*=}"
            shift
            ;;
        "-m" | "--submodule" )
            notice "The '$1' option is deprecated and does nothing currently"
            # The option historically took an argument. FIX: bash's 'shift 2'
            # is a silent no-op when fewer than 2 positional parameters
            # remain, which made the loop spin forever when '-m' was the last
            # word on the command line.
            if [[ $# -ge 2 ]]; then shift 2; else shift; fi
            ;;
        "--build-shell-args") shift
            OPT_BUILD_SHELL_ARGS="$1"; shift
            ;;
        "--build-shell-maintenance-args") shift
            OPT_BUILD_SHELL_MAINTENANCE_ARGS="$1"; shift
            ;;
        "--package-signing-user") shift
            OPT_PACKAGE_SIGN_USER="$1"; shift
            ;;
        "--package-signing-passphrase") shift
            OPT_PACKAGE_SIGN_PASSPHRASE="$1"; shift
            ;;
        "--package-signing-passphrase-file") shift
            OPT_PACKAGE_SIGN_PASSPHRASE_FILE="$1"; shift
            ;;
        "--package-timeline") shift
            OPT_PACKAGE_TIMELINE=1
            ;;
        "--check-levels" )
            [[ $2 ]] || fatal "The '$1' option requires an argument"
            OPT_CHECK_LEVELS=$2
            shift 2
            ;;
        "--check-suites" )
            [[ $2 ]] || fatal "The '$1' option requires an argument"
            OPT_CHECK_SUITES=$2
            shift 2
            ;;
        "--hooks-dir" )
            [[ $2 ]] || fatal "The '$1' option requires an argument"
            OPT_HOOKS_DIR=$2
            shift 2
            ;;
        "--no-vcs-apply" ) shift
            OPT_NO_VCS_APPLY=1
            ;;
        # Note that OPT_NEEDSPEC and OPT_NEEDTARGET may be additionally set later
        # during command options parsing!
        build | cmake | compiledb | deploy | install | installdeps \
            | make | make-install | package | prep | prepare | qmake | rpm )
            OPT_NEEDSPEC=1
            OPT_NEEDTARGET=1
            break
            ;;
        apply )
            OPT_NEEDSPEC=1
            break
            ;;
        build-init | build-shell | check )
            OPT_NEEDTARGET=1
            break;
            ;;
        build-requires )
            OPT_NEEDTARGET=1
            # 'soft': use a spec when available but do not insist on one
            OPT_NEEDSPEC=soft
            break;
            ;;
        run | ssh | undeploy | scrape)
            break
            ;;
        --version )
            echo "$ME, version 1.4.88"
            exit 0
            ;;
        --help )
            usage
            exit 0
            ;;
        *)
            short_usage quit
            ;;
    esac
done

# Serialize concurrent invocations (helper defined elsewhere in the file)
global_lock_init

# Tree-initializing commands create STATEDIR; most other commands require
# an existing one and point the user at 'build-init' otherwise.
case $1 in
    build-init | build | qmake | cmake )
        if [[ $1 == build-init && -d $STATEDIR ]]; then
            notice "Already initialized. Preserving existing configuration."
        fi
        mkdir -p "$STATEDIR" || exit
        # Keep the state directory out of version control
        echo '*' > "$STATEDIR/.gitignore"
        ;;
    check | run | ssh )
        # These can deal with STATEDIR missing
        ;;
    *)
        if [[ ! -d $STATEDIR ]]; then
            # Mentioning an ancestor build tree helps when the user simply
            # invoked the command from a subdirectory
            if ancestor=$(find_upwards "$PWD" ".$WE"); then
                fatal "The command needs to be used from the top of the build tree ($ancestor)." \
                      "See the 'build-init' command."
            else
                fatal "The command needs to be used from the top of a build tree." \
                    "See the 'build-init' command."
            fi
        fi
        ;;
esac

# When a spec was named explicitly, try to generate it from its .yaml
# counterpart before insisting that it exists.
if [[ $OPT_SPEC ]]; then
    try_to_make_spec "$OPT_SPEC"
    [[ -f $OPT_SPEC ]] || fatal "'$OPT_SPEC' does not exist (and could not be made from a .yaml)"
fi

# Validate the requested device against the SDK's devices.xml registry.
if [[ -n "$OPT_DEVICE" ]]; then
    if [[ ! -f $(get_shared_dir)/devices.xml ]]; then
        fatal "Unable to open '$(get_shared_dir)/devices.xml'." \
            "Please launch Qt Creator for the first time to finish the installation"
    fi
    # FIX: quote the command substitution used as redirection target -
    # unquoted it would break if the shared dir path contained whitespace
    if ! get_device "$OPT_DEVICE" < "$(get_shared_dir)/devices.xml"; then
        fatal "'$OPT_DEVICE' is not a known device"
    fi
fi

# 'run' (and its deprecated alias 'ssh') bypass the normal command dispatch:
# execute the remaining arguments on the selected device and exit.
if [[ $1 == @(run|ssh) ]]; then
    if [[ $1 == ssh ]]; then
        notice "The 'ssh' command is deprecated. Use 'run' instead."
    fi
    if [[ ! "$OPT_DEVICE" ]]; then
        fatal "No device specified"
    fi
    # device_username is set as a side effect of get_device() above
    ssh_as "$device_username" "${@:2}"
    exit
fi

# When a build target is required but none was given, fall back to the
# default target from the scratchbox2 configuration.
if [[ $OPT_NEEDTARGET && ! $OPT_TARGET ]]; then
    if [[ -f ~/.scratchbox2/config ]]; then
        # The sourced config defines DEFAULT_TARGET
        . ~/.scratchbox2/config
        OPT_TARGET=$DEFAULT_TARGET
    fi
    [[ $OPT_TARGET ]] || fatal "No build target specified and there is no default one"
fi

# A non-empty target must exist under the scratchbox2 configuration
[[ ! $OPT_TARGET || -d ~/.scratchbox2/$OPT_TARGET ]] \
    || fatal "'$OPT_TARGET' is not a known build target"

OPT_ORIGINAL_TARGET=$OPT_TARGET
if [[ ! $OPT_NEEDTARGET ]]; then
    # Just in case we failed to set OPT_NEEDTARGET correctly
    OPT_TARGET+=--some-garbage-to-prevent-accidental-use
elif [[ ! $OPT_NO_SNAPSHOT && ! $MB2_SELF_TEST_RUN ]]; then
    reset=soft
    # Only commands that do pull build-requires should reset.
    # The 'build-requires' command resets on its own based on its arguments.
    if [[ $OPT_PULL_BUILD_REQUIRES && $1 = @(prepare|build|qmake|cmake|compiledb)  ]]; then
        reset=outdated
    fi
    switch_to_snapshot --reset="$reset" || fatal "Failed to init build target snapshot"
elif [[ $OPT_NO_SNAPSHOT != force && ! $MB2_SELF_TEST_RUN ]]; then
    # Plain --no-snapshot (without '=force'): refuse to touch a target that
    # has snapshots, as using it directly would invalidate them
    if [[ $(sdk-manage target list --snapshots-of "$OPT_TARGET") ]]; then
        echo >&2 "The '$OPT_TARGET' target has snapshots - refusing to use it directly."
        exit 1
    fi
fi

# Record the effective target (possibly a snapshot name) in the build tree
if [[ -e $STATEDIR ]]; then
    printf '%s\n' "$OPT_TARGET" > "$STATEDIR/target"
fi

# Resolve the command word into its run_* implementation, translating
# deprecated command names first.
case "$1" in
    build | qmake | cmake | make | make-install | install | package | rpm | deploy | undeploy \
            | build-requires | installdeps | prepare | prep | apply | build-shell | compiledb \
            | check | build-init | scrape)
        if [[ $1 == install ]]; then
            notice "The 'install' command is deprecated. Use 'make-install' instead."
            set -- make-install "${@:2}"
        fi
        if [[ $1 == installdeps ]]; then
            notice "The 'installdeps' command is deprecated. Use 'build-requires' instead."
            set -- build-requires "${@:2}"
        fi
        if [[ $1 == rpm ]]; then
            notice "The 'rpm' command is deprecated. Use 'package' instead."
            set -- package "${@:2}"
        fi
        if [[ $1 == prep ]]; then
            notice "The 'prep' command is deprecated. Use 'prepare' instead."
            set -- prepare "${@:2}"
        fi
        # e.g. 'make-install' -> run_make_install
        cmd=run_${1//-/_}
        shift
        # Commands may define a <cmd>__process_args function to consume
        # their own options; it gets all remaining arguments
        if [[ $(type -t ${cmd}__process_args) == function ]]; then
            ${cmd}__process_args "$@"
            shift $#
        fi
        ;;
    *)
        short_usage quit
        ;;
esac

maybe_restore_shadow_build

maybe_set_task_name_from_git_branch_name

# --output-prefix expands into a per-task, per-target output directory
if [[ $OPT_OUTPUT_PREFIX ]]; then
    OPT_OUTPUTDIR=$OPT_OUTPUT_PREFIX/${OPT_TASK:+$OPT_TASK/}$OPT_ORIGINAL_TARGET
fi

if [[ $OPT_SEARCH_OUTPUTDIR ]]; then
    mkdir -p "$OPT_OUTPUTDIR"
fi

# A --fix-version hint must name an existing Git tag
if [[ $OPT_FIX_VERSION_HINT && ! $(git_ tag --list "$OPT_FIX_VERSION_HINT") ]]; then
    fatal "'$OPT_FIX_VERSION_HINT': no such Git tag"
fi

# spec rules are complex:
#  a .spec is required for some but not all operations
#  if -s is given then
#    if it does not exist then specify tries to make it
#    if it exists it will be used
#  if there is a rpm/*.spec then that is used
#  if there is a rpm/*.yaml then a spec is made and used

if [[ ! $OPT_SPEC ]]; then
    # set nullglob on so that the shell glob expansions will return
    # empty if files are not found
    shopt -s nullglob
    spec_files=("$OPT_PKGDIR"/*.spec)
    numspec=${#spec_files[@]}
    if [[ $numspec -gt 1 ]]; then
        # Ambiguous - only fatal when a spec is actually needed
        if [[ $OPT_NEEDSPEC ]]; then
            fatal "Multiple RPM SPEC files found - please select one."
        fi
    elif [[ $numspec -eq 1 ]]; then
        OPT_SPEC="${spec_files[0]?}"
    else
        # No spec, try to find a yaml
        yaml_files=("$OPT_PKGDIR"/*.yaml)
        numyaml=${#yaml_files[@]}
        if [[ $numyaml -eq 1 ]]; then
            # FIX: reuse the glob result already stored in yaml_files
            # instead of redundantly globbing again into a throwaway array
            try_to_make_spec_from_yaml "${yaml_files[0]}"
            spec_files=("$OPT_PKGDIR"/*.spec)
            OPT_SPEC="${spec_files[0]?}"
        elif [[ $OPT_NEEDSPEC && $OPT_NEEDSPEC != soft ]]; then
            fatal "No RPM SPEC or YAML file found in '$OPT_PKGDIR/'"
        fi
    fi

    # unset nullglob so that any further globbing works normally
    shopt -u nullglob
fi

# Now if there is a spec given, make sure it is up-to-date
if [[ "$OPT_SPEC" ]]; then
    # turn 'OPT_SPEC' into an absolute path
    OPT_SPEC=$(readlink -f "$OPT_SPEC")
    ensure_spec_newer_than_yaml
    warn_if_crlf_is_used
fi

remove_wrappers_dir

# Finally dispatch to the selected run_* command with its remaining arguments
$cmd "$@"

##############################################################################
exit; fi ###  S E L F - T E S T  EXECUTION BEGINS HERE #######################
##############################################################################

set -o nounset

# Absolute path to this very script, re-invoked by the test cases
SELF=$(readlink -f "$0")

# Default the tuning knobs so nounset does not trip on them
: ${MB2_SELF_TEST_VERBOSE:=}
: ${MB2_SELF_TEST_TS:=}

# All tests run inside a throwaway directory, removed on exit. Note: $temp
# is expanded when the trap is installed, deliberately.
temp=$(mktemp -d) || exit
trap "rm -rf '$temp'" EXIT
cd "$temp" || exit

# Command stubs are placed first on PATH so they shadow the real tools
mkdir stubs || exit
STUBS_DIR=$(readlink -f stubs)
ORIG_PATH=$PATH
export PATH="$STUBS_DIR:$PATH"

BUILDHOST=$(get_hostname)
OPT_TARGET=faketarget

if ! which make >/dev/null; then
    fatal "Please install 'make' and try again"
fi

# Lets the main execution path skip snapshot handling during self-test
export MB2_SELF_TEST_RUN=1

################################################################################
# Test utils

# Executed/failed test case counters, maintained by run_tc()
tc_num=0
tc_failed_num=0

# Begin test suite $1 (display name $2); remaining arguments go to the
# suite's <name>_ts_set_up function. Honors the MB2_SELF_TEST_TS filter:
# non-selected suites are silently left un-set-up.
set_up_ts() {
    local suite=$1
    TS_NAME=$2

    if [[ $MB2_SELF_TEST_TS && $TS_NAME != "$MB2_SELF_TEST_TS" ]]; then
        return
    fi

    ${suite}_ts_set_up "${@:3}" || fatal "Test suite set-up failed: $suite"
}

# Finish test suite $1 by calling its <name>_ts_tear_down function and
# clearing TS_NAME. Suites filtered out by MB2_SELF_TEST_TS are only
# reported as skipped (TS_NAME is left intact).
tear_down_ts() {
    local suite=$1

    if [[ $MB2_SELF_TEST_TS && $TS_NAME != "$MB2_SELF_TEST_TS" ]]; then
        echo "*** Skipping test suite: $TS_NAME"
        return
    fi

    ${suite}_ts_tear_down || fatal "Test suite tear-down failed: $suite"
    TS_NAME=
}

# Known issues registered for the NEXT run_tc invocation (it prints and
# clears them). Announce one with KNOWN_ISSUE "description".
KNOWN_ISSUES=()
KNOWN_ISSUE() {
    # FIX: plain '+=' replaces the legacy re-create-the-array idiom; it is
    # equally safe under 'set -o nounset' with an empty array and does not
    # re-expand existing elements
    KNOWN_ISSUES+=("$1")
}

# Execute test case function ${1}_tc with display name $2 and arguments
# ${@:3}. Captures the test's stderr for reporting, honors the
# MB2_SELF_TEST_TS suite filter, updates the global counters and runs an
# optional ${1}_tc_tear_down afterwards. Returns the test's status.
run_tc() {
    local tc=$1
    TC_NAME=$2
    local args=("${@:3}")

    if [[ $MB2_SELF_TEST_TS && ${TS_NAME:-} != "$MB2_SELF_TEST_TS" ]]; then
        # Suite not selected - drop any announced known issues as well
        KNOWN_ISSUES=()
        return
    fi

    let tc_num++
    echo "*** Executing test case: ${TS_NAME:+$TS_NAME - }$TC_NAME"

    local known_issue
    # The ':+' guard keeps nounset happy with an empty array (old bash)
    for known_issue in ${KNOWN_ISSUES[@]:+"${KNOWN_ISSUES[@]}"}; do
        echo "    Known issue: $known_issue"
    done
    KNOWN_ISSUES=()

    local stderr=
    # Swap fds 1 and 2 inside the substitution so that the test's STDERR is
    # captured into $stderr while its stdout still reaches our stderr
    { stderr=$(${tc}_tc ${args[@]:+"${args[@]}"} 3>&1 1>&2 2>&3 3>&-); } 2>&1
    local rc=$?

    if [[ $rc -ne 0 ]]; then
        let tc_failed_num++
    fi

    # Show captured stderr on failure, or always in verbose mode
    if [[ $rc -ne 0 || $MB2_SELF_TEST_VERBOSE ]]; then
        cat <<END
  ** Stderr     ** [[
$stderr
]]
END
    fi

    if [[ $(type -t ${tc}_tc_tear_down) == function ]]; then
        ${tc}_tc_tear_down
        if [[ $? -ne 0 ]]; then
            fatal "Test case tear-down failed: $tc"
        fi
    fi

    return $rc
}

# Install argument-recording stubs under STUBS_DIR. Each argument is
# '<name>' or '<name>:<opts>'; opts containing 'a' makes the stub append to
# its <name>.args file instead of overwriting it.
# NOTE(review): for a colon-less name the opts default to the name itself,
# so e.g. 'make' silently gets append mode - confirm this is intended.
arg_pass_stub_create() {
    local spec
    for spec in "$@"; do
        [[ $spec ]] || break
        local stub_name=${spec%:*}
        local flags=${spec##*:}
        local stub=$STUBS_DIR/$stub_name
        local results=$STUBS_DIR/$stub_name.args

        local redirect='>'
        [[ $flags != *a* ]] || redirect='>>'

        touch "$results" || return
        # The generated stub logs its invocation to stderr and records its
        # arguments, shell-quoted one per line, into the results file
        cat >"$stub" <<END || return
#!/bin/bash
echo "stub invoked: \$0 \$(printf "%q " "\$@")" >&2
{ [[ \$# -ne 0 ]] && printf "%q\\n" "\$@"; } $redirect"$results"
exit 0
END
        chmod +x "$stub" || return
    done
}

# Remove the named stubs and their recorded-arguments files
arg_pass_stub_remove() {
    local stub_name
    for stub_name in "$@"; do
        [[ $stub_name ]] || break
        rm -f "$STUBS_DIR/$stub_name" || return
        rm -f "$STUBS_DIR/$stub_name.args" || return
    done
}

# Print the arguments recorded by stub $1; fails when nothing was recorded
arg_pass_stub_results() {
    local args_file=$STUBS_DIR/$1.args
    if [[ -e $args_file ]]; then
        cat "$args_file"
    else
        return 1
    fi
}

# Discard the recorded arguments of the named stubs (keeps the stubs)
arg_pass_stub_clear() {
    local stub_name
    for stub_name in "$@"; do
        [[ $stub_name ]] || break
        rm -f "$STUBS_DIR/$stub_name.args" || return
    done
}

# Render the expected stub recording: arguments shell-quoted one per line,
# nothing at all for an empty argument list. Always succeeds.
expect_args() {
    if [[ $# -ne 0 ]]; then
        printf "%q\n" "$@"
    fi
}

# Like expect_args, but preceded by the blank line that separates
# recordings of consecutive (appending) stub invocations
expect_next_args() {
    printf '\n'
    expect_args "$@"
}

# Body of the 'sb2' replacement used by the self-test: validates the
# leading sb2 options, collects any 'env' assignments, then emulates or
# executes the wrapped tool. Meant to be serialized into a stub script by
# sb2_stub_create() below.
sb2_stub() {
    echo "stub invoked: $0 $(printf "%q " "$@")" >&2

    local args=("$@")
    bad_args() {
        echo "stub $0: unexpected arguments: $(printf "%q " "${args[@]}")" >&2
    }

    # Strip the expected sb2 option sequence: -t <target> is mandatory,
    # -m <mode>, -R and -- are optional
    [[ $1 == -t ]] && shift 2 || { bad_args; return 1; }
    [[ $1 == -m ]] && shift 2
    [[ $1 == -R ]] && shift
    [[ $1 == -- ]] && shift
    [[ $1 != -* ]] || { bad_args; return 1; }

    # Re-apply any 'env' prefix (options and VAR=value assignments) when
    # executing the wrapped command
    env=(env)
    if [[ $1 == env ]]; then
        shift
        while [[ $1 == -* || $1 =~ ^[[:alnum:]_]+= ]]; do
            env+=("$1")
            shift
        done
    fi

    case $1 in
        gcc)
            # Only -dumpmachine is emulated; anything else is unexpected
            case $2 in
                -dumpmachine)
                    echo "i486-meego-linux"
                    ;;
                *)
                    bad_args
                    return 1
                    ;;
            esac
            ;;
        make)
            exec "${env[@]}" "$@"
            ;;
        rpmbuild)
            # rpmbuild writes some progress messages to stdout
            exec "${env[@]}" "$@" >&2
            ;;
        rpmspec)
            exec "${env[@]}" "$@"
            ;;
        zypper)
            exec "${env[@]}" "$@"
            ;;
        *)
            bad_args
            return 1
            ;;
    esac
}

# Install an executable 'sb2' stub into STUBS_DIR. The stub embeds the
# sb2_stub function defined above via 'declare -f'.
sb2_stub_create() {
    cat >"$STUBS_DIR/sb2" <<END || return
#!/bin/bash
$(declare -f sb2_stub)
sb2_stub "\$@"
END
    chmod +x "$STUBS_DIR/sb2" || return
}

# Uninstall the 'sb2' stub created by sb2_stub_create()
sb2_stub_remove() {
    rm -f "$STUBS_DIR/sb2"
}

# Read the default sb2 target in a subshell to avoid polluting our namespace
DEFAULT_TARGET=$(. ~/.scratchbox2/config; echo "$DEFAULT_TARGET")
DEFAULT_JOBS=$(getconf _NPROCESSORS_ONLN)

if inside_build_engine && [[ ${SAILFISH_SDK_FRONTEND:-} != sfdk ]]; then
    fatal "Please use 'sfdk engine exec' to execute the self-test under the build engine"
fi

if inside_build_engine; then
    # Assume the first one mentioned is the most recent one
    DEFAULT_DEVICE=$(sed -n 's/.*name="\(Sailfish OS Emulator[^"]*\)".*/\1/; T; p; q' \
        "$(get_shared_dir)/devices.xml")
else
    echo "Note: Skipping test cases involving device connection - not inside a Sailfish OS build engine" >&2
    DEFAULT_DEVICE=
fi

################################################################################
# Test the rpm_quote function

rpm_quote_tc() {
    arg_pass_stub_create make || return

    # Create an minimal spec file
    mkdir rpm || return
    cat >rpm/test.spec <<END || return
Name: test
Version: 1.0
Release: 0
Summary: Test
License: BSD

# This would normally come from the environment
%{!?make:%define make make}

%description
Test

%build
%make "x  y"
END

    rpmbuild -bc --short-circuit --noprep --build-in-place --define "_sourcedir $(readlink -f rpm)" rpm/test.spec \
        --define "make make $(rpm_quote "a b" "c  d" e f)" >&2 || return
    local rc=0 expected= actual= diff=
    expected=$(expect_args "a b" "c  d" e f "x  y")
    actual=$(arg_pass_stub_results make) || return
    if ! diff=$(diff <(cat <<<"$expected") <(cat <<<"$actual")); then
        rc=1
        cat <<END
Test case failed: $TC_NAME
  ** Mismatch   ** [[
$diff
]]
  ** Expected   ** [[
$expected
]]
  ** Actual     ** [[
$actual
]]
END
    fi

    arg_pass_stub_remove make || return
    rm -f rpm/test.spec || return
    rmdir rpm || return

    return $rc
}

run_tc rpm_quote "Test RPM macro escaping"

################################################################################
# Test sb2 argument passing

# Suite set-up: record sb2 invocations via a stub and prepare an
# initialized build tree with an empty spec
sb2_arg_pass_ts_set_up() {
    arg_pass_stub_create sb2 || return

    # Create an empty spec file
    mkdir rpm || return
    : >rpm/test.spec || return
    "$SELF" build-init || return
}

# Suite tear-down: undo everything sb2_arg_pass_ts_set_up created
sb2_arg_pass_ts_tear_down() {
    arg_pass_stub_remove sb2 || return
    rm -f rpm/test.spec || return
    rmdir rpm || return
}

# Run $SELF with the given arguments and compare the arguments recorded by
# the sb2 stub against the expectation, after normalizing temporary paths.
sb2_arg_pass_tc() {
    local expected=$1
    local args=("${@:2}")

    arg_pass_stub_clear sb2 || return
    "$SELF" "${args[@]}" || return
    local actual_raw= actual= diff=
    actual_raw=$(arg_pass_stub_results sb2) || return
    # Replace the mktemp-generated path components with the stable
    # @TMPDIR@ placeholder used by the expectations
    actual=$(sed -e 's,/home/mersdk/.mb2/wrappers/tmp/tmp\.[[:alnum:]]\+,/tmp/@TMPDIR@/.mb2/wrappers,g' \
                 -e 's,/home/mersdk/.mb2/user-wrappers/tmp/tmp\.[[:alnum:]]\+,/tmp/@TMPDIR@/.mb2/user-wrappers,g' \
                 -e 's,/tmp\.[[:alnum:]]\+,/@TMPDIR@,g' \
                 -e "s,/home/deploy,/tmp/@TMPDIR@,g" <<<"$actual_raw")
    if ! diff=$(diff <(cat <<<"$expected") <(cat <<<"$actual")); then
        cat <<END
Test case failed: $TC_NAME
  ** Command    ** $ME $(quote "${args[@]}")
  ** Mismatch   ** [[
$diff
]]
  ** Expected   ** [[
$expected
]]
  ** Actual     ** [[
$actual
]]
  ** Actual RAW ** [[
$actual_raw
]]
END
        return 1
    fi
}

set_up_ts sb2_arg_pass "Passing arguments to sb2"

# Wrapper dirs (post-normalization) expected at the head of PATH
WRAPPERS_PATH="/tmp/@TMPDIR@/.mb2/wrappers:/tmp/@TMPDIR@/.mb2/user-wrappers"

KNOWN_ISSUE "Does not disable debug by default (ignores --enable-debug)"
KNOWN_ISSUE "Passes -j without argument"
KNOWN_ISSUE "Silently ignores extra arguments"
run_tc sb2_arg_pass "prepare" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define '_smp_mflags -j' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__patch patch" \
        -bp --short-circuit \
        /tmp/@TMPDIR@/.mb2/spec)" \
    prepare "a b" "c  d" e f

run_tc sb2_arg_pass "build" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define 'debug_package %{nil}' \
        --define "_smp_mflags -j$(rpm_quote "bad  1")" \
        --define '_rpmdir /tmp/@TMPDIR@/RPMS' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define '_rpmfilename %%{name}-%%{version}-%%{release}.%%{arch}.rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__patch patch" \
        --buildroot=/tmp/@TMPDIR@/installroot \
        -bb \
        /tmp/@TMPDIR@/.mb2/spec \
        "a b" "c  d" e f)" \
    build --prepare -j "bad  1" "a b" "c  d" e f

# Same as "build" above, but --enable-debug must drop the debug_package nil
# definition
run_tc sb2_arg_pass "build with debug" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define "_smp_mflags -j$(rpm_quote "bad  1")" \
        --define '_rpmdir /tmp/@TMPDIR@/RPMS' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define '_rpmfilename %%{name}-%%{version}-%%{release}.%%{arch}.rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__patch patch" \
        --buildroot=/tmp/@TMPDIR@/installroot \
        -bb \
        /tmp/@TMPDIR@/.mb2/spec \
        "a b" "c  d" e f)" \
    build --prepare --enable-debug -j "bad  1" "a b" "c  d" e f

run_tc sb2_arg_pass "qmake" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define 'noecho 1' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__make make" \
        -bc --short-circuit \
        /tmp/@TMPDIR@/.mb2/spec)" \
    qmake "a b" "c  d" e f

run_tc sb2_arg_pass "cmake" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define 'noecho 1' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__make make" \
        -bc --short-circuit \
        /tmp/@TMPDIR@/.mb2/spec)" \
    cmake . "a b" "c  d" e f

run_tc sb2_arg_pass "make" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- env PATH="$WRAPPERS_PATH:/tmp/@TMPDIR@/stubs:$ORIG_PATH" rpmbuild \
        --build-in-place --target= \
        --define 'noecho 1' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define "_buildhost $BUILDHOST" \
        --define "__make make" \
        -bc --short-circuit \
        /tmp/@TMPDIR@/.mb2/spec)" \
    make "a b" "c  d" e f

KNOWN_ISSUE "Silently ignores extra arguments"
run_tc sb2_arg_pass "make-install" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- rpmbuild --build-in-place --target= \
        --define 'noecho 1' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define "_buildhost $BUILDHOST" \
        --define '_skip_install_processing 1' \
        --buildroot='/tmp/@TMPDIR@/installroot' \
        -bi --short-circuit --nocheck \
        /tmp/@TMPDIR@/.mb2/spec)" \
    make-install "a b" "c  d" e f

run_tc sb2_arg_pass "package" \
    "$(expect_args \
        -t "$DEFAULT_TARGET" -- rpmbuild --build-in-place --target= \
        --define 'noecho 1' \
        --define '_rpmdir /tmp/@TMPDIR@/RPMS' \
        --define '_sourcedir /tmp/@TMPDIR@/rpm' \
        --define '_rpmfilename %%{name}-%%{version}-%%{release}.%%{arch}.rpm' \
        --define "_buildhost $BUILDHOST" \
        --buildroot='/tmp/@TMPDIR@/installroot' \
        -bb --noprep --nobuildstage --noclean \
        /tmp/@TMPDIR@/.mb2/spec)" \
    package

tear_down_ts sb2_arg_pass

################################################################################
# Test arguments passing to commands invoked by rpmbuild

# Suite set-up: stub the build tools rpmbuild invokes (make in append mode
# is implied by its name containing 'a' - see arg_pass_stub_create), install
# the sb2 stub, and write a spec whose %build exercises qmake/cmake/make.
rpmbuild_arg_pass_ts_set_up() {
    RPMBUILD_STUBS=(qmake cmake:a make)
    arg_pass_stub_create "${RPMBUILD_STUBS[@]}"
    # Keep only the stub names, dropping the ':opts' suffixes
    RPMBUILD_STUBS=("${RPMBUILD_STUBS[@]%:*}")

    sb2_stub_create || return

    # Create a minimal spec file
    mkdir rpm || return
    cat >rpm/test.spec <<END || return
Name:       test

# These would normally come from the environment
%{!?qmake:%define qmake qmake}
%{!?qmake5:%define qmake5 qmake}
%{!?cmake:%define cmake cmake}

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

Summary:    Test
Version:    0.1
Release:    1
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2

%description
Test

%prep
%setup -q -n %{name}-%{version}

%build
%qmake5
%cmake .
cmake --build .
make %{?_smp_mflags}

%install
mkdir -p %{buildroot}

%files
%defattr(-,root,root,-)
END
}

# Suite tear-down: undo everything rpmbuild_arg_pass_ts_set_up created
rpmbuild_arg_pass_ts_tear_down() {
    arg_pass_stub_remove "${RPMBUILD_STUBS[@]}" || return
    sb2_stub_remove || return
    rm -f rpm/test.spec || return
    rmdir rpm || return
}

# Run $SELF with the trailing arguments; the leading arguments are the
# expected recordings for each stub in RPMBUILD_STUBS, in order. Reports a
# numbered failure block per mismatching stub.
rpmbuild_arg_pass_tc() {
    local all_expected=("${@:1:${#RPMBUILD_STUBS[*]}}")
    local args=("${@:1+${#RPMBUILD_STUBS[*]}}")

    local rc=0

    arg_pass_stub_clear "${RPMBUILD_STUBS[@]}"
    "$SELF" "${args[@]}" || return
    local i=
    for ((i=0; i<${#RPMBUILD_STUBS[@]}; i++)); do
        local stub= expected= actual= diff=
        stub=${RPMBUILD_STUBS[i]}
        expected=${all_expected[i]}
        actual=$(arg_pass_stub_results "$stub")
        if ! diff=$(diff <(cat <<<"$expected") <(cat <<<"$actual")); then
            rc=1
            cat <<END
Test case failed: $TC_NAME/$i
  ** Command    ** $ME $(printf "%q " "${args[@]}")
  ** Failed for ** $stub
  ** Mismatch   ** [[
$diff
]]
  ** Expected   ** [[
$expected
]]
  ** Actual     ** [[
$actual
]]
END
        fi
    done

    return $rc
}

set_up_ts rpmbuild_arg_pass "Passing arguments to commands invoked by rpmbuild"

# Expected recordings are listed in RPMBUILD_STUBS order: qmake, cmake, make
run_tc rpmbuild_arg_pass "build" \
    "$(expect_args )" \
    "$(expect_args .)$(expect_next_args --build .)" \
    "$(expect_args -j"$DEFAULT_JOBS")" \
    build

run_tc rpmbuild_arg_pass "qmake" \
    "$(expect_args "a b" "c  d" e f)" \
    "$(expect_args .)$(expect_next_args --build .)" \
    "$(expect_args )" \
    qmake "a b" "c  d" e f

run_tc rpmbuild_arg_pass "cmake / configure" \
    "$(expect_args )" \
    "$(expect_args . "a b" "c  d" e f)" \
    "$(expect_args )" \
    cmake . "a b" "c  d" e f

# On a single-CPU machine no -j option is expected at all
maybe_jopt=
if [[ $DEFAULT_JOBS -ne 1 ]]; then
    maybe_jopt=-j$DEFAULT_JOBS
fi

run_tc rpmbuild_arg_pass "cmake / build" \
    "$(expect_args )" \
    "$(expect_args --build . "a b" "c  d" e f -- "g h" "i  j" k l)" \
    "$(expect_args $maybe_jopt "g h" "i  j" k l foo)" \
    cmake --build . --target foo "a b" "c  d" e f -- "g h" "i  j" k l

run_tc rpmbuild_arg_pass "make" \
    "$(expect_args )" \
    "$(expect_args --build . -- "a b" "c  d" e f)" \
    "$(expect_args $maybe_jopt "a b" "c  d" e f)" \
    make "a b" "c  d" e f

tear_down_ts rpmbuild_arg_pass

################################################################################
# Test arguments passing to commands invoked using run|ssh subcommand

ssh_arg_pass_ts_set_up() {
    # Nothing to prepare for the ssh argument-passing suite.
    true
}

ssh_arg_pass_ts_tear_down() {
    # Nothing to clean up for the ssh argument-passing suite.
    true
}

ssh_arg_pass_tc() {
    # Execute $SELF with the given arguments and require its stdout to match
    # the expected output exactly; dump a diff-based report on mismatch.
    local expected=$1
    shift
    local args=("$@")

    local actual= diff=
    actual=$("$SELF" "${args[@]}")
    # Success path: identical output, nothing to report
    diff=$(diff <(cat <<<"$expected") <(cat <<<"$actual")) && return 0

    cat <<END
Test case failed: $TC_NAME
  ** Command    ** $ME $(quote "${args[@]}")
  ** Mismatch   ** [[
$diff
]]
  ** Expected   ** [[
$expected
]]
  ** Actual     ** [[
$actual
]]
END
    return 1
}

set_up_ts ssh_arg_pass "Passing arguments using run|ssh subcommand"

# These cases need a real device; the suite is effectively skipped when
# DEFAULT_DEVICE is not configured.
if [[ $DEFAULT_DEVICE ]]; then
    # Avoid invoking the built-in printf. The device is likely running
    # busybox' ash whose built-in printf does not implement %q.
    run_tc ssh_arg_pass "remote - just spaces" \
        "$(/usr/bin/printf '%q ' 'a b' 'c  d' e f)" \
        --device "$DEFAULT_DEVICE" run /usr/bin/printf '%q ' 'a b' 'c  d' e f

    run_tc ssh_arg_pass "remote - spaces and newlines" \
        "$(/usr/bin/printf '%q'$'\n' 'echo b' 'c  d' e f)" \
        --device "$DEFAULT_DEVICE" run /usr/bin/printf '%q'$'\n' 'echo b' 'c  d' e f

    run_tc ssh_arg_pass "remote - spaces and backslashes" \
        "$(/usr/bin/printf '%q\n' 'a\ b' 'c  d' e f)" \
        --device "$DEFAULT_DEVICE" run /usr/bin/printf '%q\n' 'a\ b' 'c  d' e f
fi

tear_down_ts ssh_arg_pass

################################################################################
# Test --fix-version behavior

fix_version_ts_set_up() {
    # Build the Git history pictured in the comment below (before
    # set_up_ts): 31 commits on 'upstream', 21 commits on 'downstream'
    # branched at U10 and merging U20 right before D10.  Commit dates are
    # pinned so the generated version strings are reproducible.
    #
    # Optional arguments create tags on the recorded commits:
    #   --tag <name> <commit>    lightweight tag, e.g. --tag v0.2 U10
    #   --tag-a <name> <commit>  annotated tag
    COMMITS_U=()
    COMMITS_D=()

    # Map a symbolic name like U7 or D15 to the recorded abbreviated SHA1;
    # fails (via 'grep .') when the name resolves to nothing.
    get_commit() {
        eval echo \${COMMITS_${1:0:1}[${1:1}]} |grep .
    }

    # 'tag_annotation' and 'date' were previously left global by mistake
    local i= commit= tag_name= tag_object= tag_annotation= date=

    git init . --quiet || return
    git config user.email "john@doe.com" || return
    git config user.name "John Doe" || return

    git checkout -b upstream --quiet || return
    for ((i=0; i<=30; i++)); do
        printf "U%02d\n" "$i" > U
        git add U || return
        # 2000-01-01 + <i> days, 00:00 UTC
        date="@$(TZ=UTC date --date="2000-01-01 00:00:00 UTC +$i days" +%s) +0000"
        GIT_AUTHOR_DATE=$date GIT_COMMITTER_DATE=$date \
            git commit --message "$(<U)" --quiet || return
        commit=$(git rev-list --max-count=1 --abbrev-commit HEAD) || return
        COMMITS_U+=("$commit")
    done

    git checkout -b downstream "${COMMITS_U[10]}" --quiet || return
    for ((i=0; i<=20; i++)); do
        if [[ $i -eq 10 ]]; then
            # Fold upstream's U20 into the commit that becomes D10
            git merge --no-commit "${COMMITS_U[20]}" --quiet 2>/dev/null || return
        fi
        printf "D%02d\n" "$i" > D
        git add D || return
        # 2000-01-01 + <i+10> days, 12:00 UTC
        date="@$(TZ=UTC date --date="2000-01-01 00:00:00 UTC +$((i+10)) days +12 hours" +%s) +0000"
        GIT_AUTHOR_DATE=$date GIT_COMMITTER_DATE=$date \
            git commit --message "$(<D)" --quiet || return
        commit=$(git rev-list --max-count=1 --abbrev-commit HEAD) || return
        COMMITS_D+=("$commit")
    done

    while [[ ${1:-} ]]; do
        case $1 in
            --tag)
                tag_name=$2
                tag_object=$(get_commit $3) || return
                git tag "$tag_name" "$tag_object" || return
                shift 2
                ;;
            --tag-a)
                tag_name=$2
                tag_object=$(get_commit $3) || return
                # The symbolic commit name doubles as the annotation message
                tag_annotation=$3
                git tag -a -m "$tag_annotation" "$tag_name" "$tag_object" || return
                shift 2
                ;;
            *)
                return 1
                ;;
        esac
        shift
    done
}

fix_version_ts_tear_down() {
    # Drop every piece of suite state: the commit arrays, the helper
    # function, the temporary Git repository and the work-tree files.
    unset COMMITS_U COMMITS_D
    unset -f get_commit
    rm -rf .git && rm -f U D dirty
}

fix_version_tc() {
    # Exercise maybe_fix_package_version against the prepared Git history.
    #
    # $1 - 'dirty' to add an uncommitted change before the check, anything
    #      else (conventionally 'clean') for a clean work tree
    # $2 - symbolic commit (U<n>/D<n>, see get_commit) to check out as HEAD
    # $3 - expected version; @U<n>@/@D<n>@/@STASH@ placeholders are replaced
    #      with the corresponding abbreviated SHA1s before comparison
    # $4 - optional tag hint forwarded as OPT_FIX_VERSION_HINT
    local dirty=$1 head=$2 expected_raw=$3 hint=${4:-}

    head=$(get_commit "$head") || return

    git reset --hard --quiet || return
    git checkout -B downstream "$head" --quiet || return
    if [[ $dirty == dirty ]]; then
        echo dirty >> D
    fi

    local version_file=version
    local last_stash_file=last_stash

    # Minimal file with an empty Version: line for
    # maybe_fix_package_version to fill in
    echo "Version:" > $version_file

    (
        # Shadow git inside this subshell so that any stash created by
        # maybe_fix_package_version gets a fixed timestamp (deterministic
        # version strings) and its SHA1 is captured in $last_stash_file.
        git() (
            echo "stub invoked: git" >&2

            options=()
            while [[ $# -gt 0 ]]; do
                case $1 in
                    -c)
                        options+=("$1" "$2")
                        shift 2
                        ;;
                    -*)
                        echo "stub git: unhandled option '$1' passed" >&2
                        return 1
                        ;;
                    *)
                        break
                        ;;
                esac
            done

            subcommand=("$@")
            set -- "${options[@]}" "${subcommand[@]}"

            if [[ ${subcommand[0]} == stash && ${subcommand[1]} == create ]]; then
                # Stash commits always dated 2000-12-31 18:00 UTC
                date="@$(TZ=UTC date --date="2000-12-31 18:00:00 UTC" +%s) +0000"
                GIT_AUTHOR_DATE=$date GIT_COMMITTER_DATE=$date \
                    command git "$@" |tee $last_stash_file
            else
                command git "$@"
            fi
        )

        OPT_FIX_VERSION=1
        OPT_FIX_VERSION_HINT=$hint
        OPT_NO_FIX_VERSION=
        maybe_fix_package_version $version_file >&2
    ) || return

    local actual=$(sed 's/^Version: //' $version_file)

    local stash_sha1=
    if [[ -e $last_stash_file ]]; then
        stash_sha1=$(git rev-list --max-count=1 --abbrev-commit "$(<$last_stash_file)")
    fi

    # Substitute the placeholders in the expected string
    local expected=$expected_raw i=
    for ((i=0; i<=30; i++)); do
        expected=${expected//@U$i@/${COMMITS_U[$i]}}
    done
    for ((i=0; i<=20; i++)); do
        expected=${expected//@D$i@/${COMMITS_D[$i]}}
    done
    expected=${expected//@STASH@/${stash_sha1:-<none>}}

    if [[ $actual != "$expected" ]]; then
        cat <<END
Test case failed: $TC_NAME
  ** Arguments    ** $(printf "%q " "$@")
  ** Expected     ** '$expected'
  ** Actual       ** '$actual'
  ** Expected RAW ** '$expected_raw'
  ** Git-status   ** [[
$(git status)
]]
  ** Git-log      ** [[
$(git log --graph --decorate --pretty=fuller --all ${stash_sha1:-})
]]
END
        return 1
    fi
}

# Works with Git history like this
#
# U00---...---U10---U11---...---U20---U21---...---U30 <-- upstream
#               \                 \
#                D00---D01---...---D10---D11---...---D20 <-- downstream
#
# Upstream commit dates are 2000-01-01 + <num> days 00:00 UTC, downstream commit dates are
# 2000-01-01 + <num+10> days 12:00 UTC, stashes are 2000-12-31 18:00 UTC.

# The '' pairs in the expected versions below are empty strings: the shell
# concatenates the pieces, so e.g. 2000''01''18''12''00''00 is the single
# word 20000118120000 - presumably written this way only to keep the date
# fields visually separated.
set_up_ts fix_version "Test --fix-version behavior" \
    --tag v0.1 U5 \
    --tag v0.2 U10 \
    --tag v0.3 U15 \
    --tag v0.4 U20 \
    --tag v0.5 U25 \
    --tag mer/v0.2+git1 D5 \
    --tag mer/v0.4+git1 D11 \
    --tag-a mer/v0.4+git2 D15 \
    --tag mer/v0.4+git3 D20

run_tc fix_version "on tag, clean D5"               clean D5  0.2+git1
run_tc fix_version "ahead tag, clean D7"            clean D7  0.2+git1+downstream.2000''01''18''12''00''00.@D7@
run_tc fix_version "ahead tag, dirty D7"            dirty D7  0.2+git1+downstream.2000''12''31''18''00''00.@STASH@
run_tc fix_version "ahead upstream tag, clean D10"  clean D10 0.4+downstream.2000''01''21''12''00''00.@D10@
run_tc fix_version "on tag, clean D11"              clean D11 0.4+git1
run_tc fix_version "on tag, dirty D11"              dirty D11 0.4+git1+downstream.2000''12''31''18''00''00.@STASH@
run_tc fix_version "ahead tag, clean D12"           clean D12 0.4+git1+downstream.2000''01''23''12''00''00.@D12@
run_tc fix_version "ahead tag, dirty D12"           dirty D12 0.4+git1+downstream.2000''12''31''18''00''00.@STASH@
run_tc fix_version "on annotated, clean D15"        clean D15 0.4+git2
run_tc fix_version "ahead annotated, clean D16"     clean D16 0.4+git2+downstream.2000''01''27''12''00''00.@D16@
run_tc fix_version "ahead annotated, dirty D16"     dirty D16 0.4+git2+downstream.2000''12''31''18''00''00.@STASH@
run_tc fix_version "on tag, clean D15 + hint"       clean D15 0.4+git1+downstream.2000''01''26''12''00''00.@D15@    mer/v0.4+git1
run_tc fix_version "on tag, dirty D15 + hint"       dirty D15 0.4+git1+downstream.2000''12''31''18''00''00.@STASH@  mer/v0.4+git1
run_tc fix_version "ahead tag, clean D16 + hint"    clean D16 0.4+git1+downstream.2000''01''27''12''00''00.@D16@    mer/v0.4+git1
run_tc fix_version "ahead tag, dirty D16 + hint"    dirty D16 0.4+git1+downstream.2000''12''31''18''00''00.@STASH@  mer/v0.4+git1

tear_down_ts fix_version

################################################################################
# Test the readarray behavior - simplified version implemented as a shell
# function for compatibility with older BASH

readarray_tc() {
    # Verify that readarray (either the bash builtin or the compatibility
    # fallback function for old BASH) reads lines faithfully, preserving
    # leading/trailing whitespace and glob-like characters.
    local expected=('  abc *  def  ' '  g  h  i  ')
    local actual=()
    echo "Note: readarray is a $(type -t readarray)" >&2
    readarray -t actual < <(printf "%s\n" "${expected[@]}")

    local matched=1 idx=
    if [[ ${#actual[*]} -ne ${#expected[*]} ]]; then
        matched=
    else
        for ((idx=0; idx<${#expected[*]}; idx++)); do
            if [[ ${actual[$idx]} != "${expected[$idx]}" ]]; then
                matched=
                break
            fi
        done
    fi

    if [[ ! $matched ]]; then
        cat <<END
Test case failed: $TC_NAME
  ** Expected   ** [[
$(declare -p expected)
]]
  ** Actual     ** [[
$(declare -p actual)
]]
END
        return 1
    fi
}

run_tc readarray "Test readarray behavior"

################################################################################
# Test build phases separation

rpmbuild_phases_ts_set_up() {
    # Create an argument-recording stub for every command the test spec
    # invokes, plus the sb2 stub, and write the spec file itself.  The ':a'
    # flag on the cmake entry is consumed by arg_pass_stub_create
    # (presumably append-mode recording since cmake is invoked twice per
    # build - confirm against the helper's definition) and stripped right
    # after, so the plain names are used for later lookups.
    RPMBUILD_STUBS=(my_setup qmake cmake:a make compiledb my_install my_check)
    arg_pass_stub_create "${RPMBUILD_STUBS[@]}"
    RPMBUILD_STUBS=("${RPMBUILD_STUBS[@]%:*}")

    sb2_stub_create || return

    # Create a minimal spec file
    mkdir rpm || return
    cat >rpm/test.spec <<END || return
Name:       test

# These would normally come from the environment
%{!?qmake:%define qmake qmake}
%{!?qmake5:%define qmake5 qmake}
%{!?cmake:%define cmake cmake}

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

Summary:    Test
Version:    0.1
Release:    1
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2

%description
Test

%prep
my_setup

%build
%qmake5
%cmake .
cmake --build .
make %{?_smp_mflags}

%install
mkdir -p %{buildroot}
my_install

# Verifies that --noclean is used
touch %{buildroot}/foo

%check
my_check

%files
%defattr(-,root,root,-)
/foo
END
}

rpmbuild_phases_ts_tear_down() {
    # Reverse of rpmbuild_phases_ts_set_up: remove the stubs, the spec file
    # and any artifacts a test case may have produced.
    arg_pass_stub_remove "${RPMBUILD_STUBS[@]}" || return
    sb2_stub_remove || return
    rm -f rpm/test.spec || return
    rmdir rpm || return
    rm -f RPMS/test-0.1-1.i486.rpm || return
    # RPMS only exists when a package was actually built
    ! [[ -d RPMS ]] || rmdir RPMS || return
    # NOTE(review): assumes $buildroot is always set by the framework -
    # if it were empty this would touch /foo; confirm.
    rm -f "$buildroot/foo" || return
}

rpmbuild_phases_tc() {
    # Run $SELF with the given arguments, then derive which build phases
    # actually executed (via the argument-recording stubs and the produced
    # RPM) and compare the summary against the expected string.
    local expected=$1
    local args=("${@:2}")

    arg_pass_stub_clear "${RPMBUILD_STUBS[@]}" || return
    rm -f RPMS/test-0.1-1.i486.rpm || return

    "$SELF" "${args[@]}" || return

    # Phase flags are collected in fixed order and joined with spaces below
    local summary=()

    if arg_pass_stub_results my_setup &>/dev/null; then
        summary+=("+prepare")
    else
        summary+=("-prepare")
    fi

    if arg_pass_stub_results qmake &>/dev/null; then
        summary+=("+qmake")
    else
        summary+=("-qmake")
    fi

    # cmake configure mode: first recorded argument is exactly "."
    if arg_pass_stub_results cmake |head -n1 |grep -q --line-regexp -F -e .; then
        summary+=("+cmake-c")
    else
        summary+=("-cmake-c")
    fi

    # cmake build mode: a recorded argument is exactly "--build"
    if arg_pass_stub_results cmake |grep -q --line-regexp -F -e --build; then
        summary+=("+cmake-b")
    else
        summary+=("-cmake-b")
    fi

    # '~make' means make ran with --dry-run (the compiledb case)
    if arg_pass_stub_results make |grep -q -F -e --dry-run; then
        summary+=("~make")
    elif arg_pass_stub_results make &>/dev/null; then
        summary+=("+make")
    else
        summary+=("-make")
    fi

    if arg_pass_stub_results my_install &>/dev/null; then
        summary+=("+make-install")
    else
        summary+=("-make-install")
    fi

    if arg_pass_stub_results my_check &>/dev/null; then
        summary+=("+check")
    else
        summary+=("-check")
    fi

    if [[ -e RPMS/test-0.1-1.i486.rpm ]]; then
        if rpm -q --requires -p RPMS/test-0.1-1.i486.rpm |grep -q '^rpmlib(ShortCircuited)'; then
            summary+=("+package (short-circuited)")
        else
            summary+=("+package")
        fi
    else
        summary+=("-package")
    fi

    local actual="${summary[*]}"

    if [[ $actual != "$expected" ]]; then
        cat <<END
Test case failed: $TC_NAME
  ** Command    ** $ME $(printf "%q " "${args[@]}")
  ** Expected   ** $expected
  ** Actual     ** $actual
END
        return 1
    fi

    return 0
}

# Each expected string lists the phases in fixed order with '+' (phase ran),
# '-' (did not run) or '~make' (make invoked with --dry-run).
set_up_ts rpmbuild_phases "Separating build phases"

run_tc rpmbuild_phases "prepare" \
    "+prepare -qmake -cmake-c -cmake-b -make -make-install -check -package" prepare
run_tc rpmbuild_phases "build" \
    "-prepare +qmake +cmake-c +cmake-b +make +make-install +check +package" build
run_tc rpmbuild_phases "build --prepare" \
    "+prepare +qmake +cmake-c +cmake-b +make +make-install +check +package" build --prepare
run_tc rpmbuild_phases "qmake" \
    "-prepare +qmake +cmake-c +cmake-b -make -make-install -check -package" qmake
run_tc rpmbuild_phases "cmake / configure" \
    "-prepare +qmake +cmake-c -cmake-b -make -make-install -check -package" cmake .
run_tc rpmbuild_phases "cmake / build" \
    "-prepare +qmake -cmake-c +cmake-b +make -make-install -check -package" cmake --build .
run_tc rpmbuild_phases "make" \
    "-prepare -qmake -cmake-c +cmake-b +make -make-install -check -package" make
run_tc rpmbuild_phases "make-install" \
    "-prepare -qmake -cmake-c -cmake-b -make +make-install -check -package" make-install
run_tc rpmbuild_phases "package" \
    "-prepare -qmake -cmake-c -cmake-b -make +make-install +check +package" package
run_tc rpmbuild_phases "compiledb" \
    "-prepare +qmake +cmake-c +cmake-b ~make -make-install -check -package" compiledb

tear_down_ts rpmbuild_phases

################################################################################
# Test compatibility with various packaging options

# Accepts up to one argument - a .pro file, or a directory to look for a .pro file in. Defaults to
# the current working directory. Creates a Makefile that makes a single file named after the TARGET
# name and installs it directly under the INSTALL_ROOT.
special_packaging_qmake_stub() {
    # Emulate qmake for the self tests: locate exactly one .pro file,
    # extract its TARGET= value and generate a Makefile whose 'all' target
    # creates a file named after TARGET and whose 'install' target copies
    # it under $(INSTALL_ROOT).
    #
    # Also extracted into a standalone stub script (see
    # special_packaging_ts_set_up), so it must stay self-contained.
    local pro_files=() target=
    shopt -s nullglob
    if [[ $# -eq 0 ]]; then
        pro_files=(*.pro)
    elif [[ $# -eq 1 ]]; then
        if [[ $1 == *.pro ]]; then
            if [[ ! -e $1 ]]; then
                echo "stub qmake: file not found: $1" >&2
                return 1
            fi
            # Quoted: previously 'pro_files=($1)' word-split/globbed paths
            pro_files=("$1")
        elif [[ -d $1 ]]; then
            pro_files=("$1"/*.pro)
        else
            echo "stub qmake: not a directory: $1" >&2
            return 1
        fi
    else
        echo "stub qmake: up to one argument expected" >&2
        return 1
    fi

    if [[ ${#pro_files[*]} -lt 1 ]]; then
        echo "stub qmake: no project file found" >&2
        return 1
    elif [[ ${#pro_files[*]} -gt 1 ]]; then
        echo "stub qmake: multiple project files found - select one" >&2
        return 1
    fi

    target=$(sed -n 's/^TARGET=//p' "${pro_files[0]}")
    if [[ ! $target ]]; then
        echo "stub qmake: failed to parse project file '${pro_files[0]}'" >&2
        return 1
    fi

    # Recipe lines below carry the hard tabs make requires
    cat >Makefile <<EOF
all:
	echo $target > $target

install:
	cp $target \$(INSTALL_ROOT)/$target
EOF
}

special_packaging_ts_set_up() {
    # Install the sb2 stub plus an executable qmake stub script that embeds
    # special_packaging_qmake_stub and forwards its arguments to it.
    sb2_stub_create || return

    {
        echo '#!/bin/bash'
        declare -f special_packaging_qmake_stub
        echo 'special_packaging_qmake_stub "$@"'
    } >"$STUBS_DIR/qmake" || return
    chmod +x "$STUBS_DIR/qmake" || return
}

special_packaging_ts_tear_down() {
    # Reverse of special_packaging_ts_set_up.
    sb2_stub_remove || return
    rm -f "$STUBS_DIR/qmake"
}

# Accepts 7 arguments described inline
special_packaging_tc() {
    # Generate a qmake-based test project laid out according to the seven
    # flag arguments, then exercise it twice: once with 'mb2 build' and
    # once phase by phase (qmake/make/package).  Non-zero on any failure.

    # SUBDIR: Put sources under subdirectory
    #   - N: No
    #   - Y: Yes, pass it to the %setup macro
    #   - NOSET: Yes, do not pass it to the %setup macro
    local maybe_subdir=$([[ $1 == Y || $1 == NOSET ]] && echo test)
    local maybe_setupdir=$([[ $1 == Y ]] && echo "/%{name}")

    # MULTI: Create multiple .spec and .pro files
    #   - Y/N
    local enable_multi=$([[ $2 == Y ]] && echo 1)

    # PATCH: Create .spec with patches
    #   - N: No
    #   - APPLY: Yes, use 'mb2 apply' to apply them
    #   - PREP: Yes, use 'mb2 prepare' to apply them
    #   - BPREP: Yes, use 'mb2 build --prepare' to apply them
    local enable_patch=$([[ $3 == APPLY || $3 == PREP || $3 == BPREP ]] && echo "$3")

    # SHADOW BUILD: Do shadow build
    #   - Y/N
    local enable_shadow=$([[ $4 == Y ]] && echo 1)

    # PROJECT IN SPEC: Pass .pro file to qmake call in the .spec file
    #   - Y/N
    local maybe_project_in_spec=
    if [[ $5 == Y ]]; then
        if [[ ! $maybe_subdir || $maybe_setupdir ]]; then
            maybe_project_in_spec=test.pro
        else
            maybe_project_in_spec=$maybe_subdir/test.pro
        fi
    fi

    # PROJECT ON CMDLN: Pass .pro file or directory on command line
    #   - N: No
    #   - FILE: Yes
    #   - DIR: Yes, pass the path to the subdirectory containing the .pro file
    #   - OTHER: Yes, pass other .pro file than used in the .spec
    local project_on_cmdln=
    case $6 in
        FILE) project_on_cmdln=${maybe_subdir:+$maybe_subdir/}test.pro;;
        DIR) project_on_cmdln=$maybe_subdir;;
        OTHER) project_on_cmdln=${maybe_subdir:+$maybe_subdir/}other.pro;;
    esac

    # AUTO SETUP: Use %autosetup
    #   - Y/N
    local setup_method="setup -q"
    local enable_patch_auto=
    local enable_patch_explicit=$enable_patch
    if [[ $7 == Y ]]; then
        local setup_method="autosetup"
        local enable_patch_auto=$enable_patch_explicit
        enable_patch_explicit=
    fi

    local srcdir=test
    local builddir=test

    local spec_arg=()
    if [[ $enable_multi ]]; then
        if [[ $enable_shadow ]]; then
            spec_arg=(--specfile ../test/rpm/test.spec)
        else
            spec_arg=(--specfile rpm/test.spec)
        fi
    fi

    if [[ $enable_shadow ]]; then
        # Shadow build: paths on the command line become relative to the
        # separate build directory
        if [[ $project_on_cmdln ]]; then
            project_on_cmdln=/$project_on_cmdln
        fi
        project_on_cmdln=../test$project_on_cmdln
        builddir=test-build
    fi

    mkdir -p "$srcdir" || return
    mkdir -p "$builddir" || return

    # NOTE(review): '${enable_patch_auto+ -p1}' below uses '+' rather than
    # ':+', and enable_patch_auto is always declared (possibly empty)
    # above, so ' -p1' is emitted unconditionally; harmless because %setup
    # is redefined to echo, but confirm whether ':+' was intended.
    mkdir "$srcdir/rpm" || return
    cat >"$srcdir/rpm/test.spec" <<END || return
Name:       test

# These would normally come from the environment
%{!?qmake:%define qmake qmake}
%{!?qmake5:%define qmake5 qmake}
%{!?qmake5_install:%define qmake5_install make install INSTALL_ROOT=%{buildroot}}

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

# Make it noop
%define setup echo

Summary:    Test
Version:    0.1
Release:    1
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2
# rpmbuild applies patches in the order they appear here, ignoring their numbering
# the next line has hard tab on purpose - it tests that tabs work
${enable_patch:+Patch1:    test-fix-1.patch}
${enable_patch:+Patch0:  test-fix-2.patch}

%description
Test

%prep
%${setup_method} -n %{name}-%{version}${maybe_setupdir}${enable_patch_auto+ -p1}
${enable_patch_explicit:+%patch1 -p1}
${enable_patch_explicit:+%patch0 -p1}

%build
%qmake5 ${maybe_project_in_spec}
make %{?_smp_mflags}

%install
mkdir -p %{buildroot}
%qmake5_install

%files
%defattr(-,root,root,-)
/foo${enable_patch:+-patched}
END

    mkdir -p "$srcdir/$maybe_subdir" || return
    echo TARGET=foo > "$srcdir/$maybe_subdir/test.pro" || return

    if [[ $enable_multi ]]; then
        touch "$srcdir/rpm/other.spec" || return
        mkdir -p "$srcdir/$maybe_subdir" || return
        echo TARGET=bar > "$srcdir/$maybe_subdir/other.pro" || return
    fi

    # The two patches chain: fix-2 only applies on top of fix-1's result,
    # so ordering mistakes are detected
    if [[ $enable_patch ]]; then
        cat >"$srcdir/rpm/test-fix-1.patch" <<END || return
--- a/test.pro
+++ b/test.pro
@@ -1 +1 @@
-TARGET=foo
+TARGET=foo-patched-xxx
END
        cat >"$srcdir/rpm/test-fix-2.patch" <<END || return
--- a/test.pro
+++ b/test.pro
@@ -1 +1 @@
-TARGET=foo-patched-xxx
+TARGET=foo-patched
END
    fi

    # Log the generated tree for post-mortem debugging
    find -ls >&2

    (
        maybe_build_prepare=
        case $enable_patch in
            APPLY) (
                cd "$srcdir" || return
                echo "--- Preparing sources" >&2
                set -x
                "$SELF" build-init || return
                "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} apply || return
                ) || return;;
            PREP) (
                cd "$srcdir" || return
                echo "--- Preparing sources" >&2
                set -x
                "$SELF" build-init || return
                "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} prepare || return
                ) || return;;
            BPREP)
                maybe_build_prepare=--prepare
                ;;
        esac

        (
            cd "$builddir" || return
            echo "--- Running build" >&2
            set -x
            "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} build ${maybe_build_prepare} ${project_on_cmdln:+"$project_on_cmdln"} || return
        ) || return

        # Drop the artifacts of the full build before the phased rebuild
        rm -rf "$builddir"/{.mb2,RPMS,Makefile,foo} "$buildroot" || return

        (
            cd "$builddir" || return
            echo "--- Running qmake/make/package" >&2
            set -x
            "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} qmake ${project_on_cmdln:+"$project_on_cmdln"} || return
            "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} make || return
            "$SELF" ${spec_arg[@]:+"${spec_arg[@]}"} package || return
        ) || return
    )

    if [[ $? -ne 0 ]]; then
        cat <<END
Test case failed: $TC_NAME
END
        return 1
    fi

    return 0
}

special_packaging_tc_tear_down() {
    # Per-test-case cleanup: drop the build root plus the source/build trees.
    rm -rf "$buildroot" && rm -rf ./test ./test-build
}

# The seven columns after each test case name map onto the seven
# special_packaging_tc arguments, documented inline in that function.

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Flat package"
run_tc special_packaging "Basic" \
    N       N      N      N       N        N         N
run_tc special_packaging "Basic (shadow)" \
    N       N      N      Y       N        N         N
run_tc special_packaging "Pass .pro file in spec" \
    N       N      N      N       Y        N         N
run_tc special_packaging "Pass .pro file in spec (shadow)" \
    N       N      N      Y       Y        N         N
run_tc special_packaging "Pass .pro file on cmdline" \
    N       N      N      N       N        FILE      N
run_tc special_packaging "Pass .pro file in spec and on cmdline (shadow)" \
    N       N      N      Y       Y        FILE      N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Flat package / Multiple configs"
run_tc special_packaging "Basic" \
    N       Y      N      N       Y        N         N
run_tc special_packaging "Basic (shadow)" \
    N       Y      N      Y       Y        N         N
run_tc special_packaging "Pass .pro file" \
    N       Y      N      N       Y        FILE      N
run_tc special_packaging "Pass .pro file (shadow)" \
    N       Y      N      Y       Y        FILE      N
run_tc special_packaging "Pass other .pro file" \
    N       Y      N      N       Y        OTHER     N
run_tc special_packaging "Pass other .pro file (shadow)" \
    N       Y      N      Y       Y        OTHER     N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Flat package / With patches"
run_tc special_packaging "Use apply" \
    N       N      APPLY  N       N        N         N
run_tc special_packaging "Use apply (shadow)" \
    N       N      APPLY  Y       N        N         N
run_tc special_packaging "Use prepare" \
    N       N      PREP   N       N        N         N
run_tc special_packaging "Use prepare (shadow)" \
    N       N      PREP   Y       N        N         N
run_tc special_packaging "Use build --prepare" \
    N       N      BPREP  N       N        N         N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Flat package / With patches and %autosetup"
run_tc special_packaging "Use apply" \
    N       N      APPLY  N       N        N         Y
run_tc special_packaging "Use apply (shadow)" \
    N       N      APPLY  Y       N        N         Y
run_tc special_packaging "Use prepare" \
    N       N      PREP   N       N        N         Y
run_tc special_packaging "Use prepare (shadow)" \
    N       N      PREP   Y       N        N         Y
run_tc special_packaging "Use build --prepare" \
    N       N      BPREP  N       N        N         Y
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Subtree package"
run_tc special_packaging "Basic" \
    Y       N      N      N       N        N         N
run_tc special_packaging "Basic (shadow)" \
    Y       N      N      Y       N        N         N
run_tc special_packaging "Pass .pro file" \
    Y       N      N      N       N        FILE      N
run_tc special_packaging "Pass .pro file (shadow)" \
    Y       N      N      Y       N        FILE      N
run_tc special_packaging "Pass .pro dir" \
    Y       N      N      N       N        DIR       N
run_tc special_packaging "Pass .pro dir (shadow)" \
    Y       N      N      Y       N        DIR       N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Subtree package / Without %setup subdir"
run_tc special_packaging "Basic" \
    NOSET   N      N      N       Y        N         N
run_tc special_packaging "Basic (shadow)" \
    NOSET   N      N      Y       Y        N         N
run_tc special_packaging "Pass .pro file" \
    NOSET   N      N      N       Y        FILE      N
run_tc special_packaging "Pass .pro file (shadow)" \
    NOSET   N      N      Y       Y        FILE      N
run_tc special_packaging "Pass .pro dir" \
    NOSET   N      N      N       Y        DIR       N
run_tc special_packaging "Pass .pro dir (shadow)" \
    NOSET   N      N      Y       Y        DIR       N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Subtree package / Multiple configs"
run_tc special_packaging "Basic" \
    Y       Y      N      N       Y        N         N
run_tc special_packaging "Basic (shadow)" \
    Y       Y      N      Y       Y        N         N
run_tc special_packaging "Pass .pro file" \
    Y       Y      N      N       Y        FILE      N
run_tc special_packaging "Pass .pro file (shadow)" \
    Y       Y      N      Y       Y        FILE      N
run_tc special_packaging "Pass .pro dir" \
    Y       Y      N      N       Y        DIR       N
run_tc special_packaging "Pass .pro dir (shadow)" \
    Y       Y      N      Y       Y        DIR       N
run_tc special_packaging "Pass other .pro file" \
    Y       Y      N      N       Y        OTHER     N
run_tc special_packaging "Pass other .pro file (shadow)" \
    Y       Y      N      Y       Y        OTHER     N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Subtree package / With patches"
run_tc special_packaging "Use apply" \
    Y       N      APPLY  N       N        N         N
run_tc special_packaging "Use apply (shadow)" \
    Y       N      APPLY  Y       N        N         N
run_tc special_packaging "Use prepare" \
    Y       N      PREP   N       N        N         N
run_tc special_packaging "Use prepare (shadow)" \
    Y       N      PREP   Y       N        N         N
run_tc special_packaging "Use build --prepare" \
    Y       N      BPREP  N       N        N         N
tear_down_ts special_packaging

#   SUBDIR  MULTI  PATCH  SHADOW  PROJECT  PROJECT   AUTO
#                         BUILD   IN SPEC  ON CMDLN  SETUP

set_up_ts special_packaging "Compatibility / Subtree package / With patches and %autosetup"
run_tc special_packaging "Use apply" \
    Y       N      APPLY  N       N        N         Y
run_tc special_packaging "Use apply (shadow)" \
    Y       N      APPLY  Y       N        N         Y
run_tc special_packaging "Use prepare" \
    Y       N      PREP   N       N        N         Y
run_tc special_packaging "Use prepare (shadow)" \
    Y       N      PREP   Y       N        N         Y
run_tc special_packaging "Use build --prepare" \
    Y       N      BPREP  N       N        N         Y
tear_down_ts special_packaging

################################################################################
# Test compatibility with various packaging options (cmake based projects)

# In "configure" mode accepts up to one argument - source directory path.
# Defaults to current working directory. Creates Makefile that makes single
# file named after the single executable name expected in CMakeLists.txt and
# installs it directly under the INSTALL_ROOT.
#
# In "build" mode expects "." as the build directory and invokes "make",
# passing optional build-tool arguments and optionally the target name.
special_packaging_cmake_stub() {
    # Emulate cmake for the self tests.
    #
    # Configure mode (no --build): optional source directory argument
    # (default '.'), requires CMakeLists.txt there; generates a Makefile
    # whose 'all' target creates a file 'foo' and whose 'install' target
    # copies it under $(DESTDIR).
    #
    # Build mode (--build .): forwards the arguments after '--' to make,
    # appending the --target name when one was given.
    #
    # Also extracted into a standalone stub script (see
    # special_packaging_cmake_ts_set_up), so it must stay self-contained.
    local target= sources= make_args=()
    if [[ ${1:-} == --build ]]; then
        if [[ ${2:-} != . ]]; then
            echo "stub cmake: unexpected argument to '--build': '${2:-}'" >&2
            return 1
        fi
        shift 2
        while [[ $# -gt 0 ]]; do
            case $1 in
                --target)
                    target=$2
                    shift
                    ;;
                --)
                    shift
                    make_args=("$@")
                    break
                    ;;
                *)
                    # other cmake build options are ignored by the stub
                    ;;
            esac
            shift
        done

        # Previously 'make "$@" $target' relied on "$@" happening to equal
        # the collected make_args; use the explicit array instead
        make ${make_args[@]:+"${make_args[@]}"} ${target:+"$target"}
        return
    else
        if [[ ! ${1:-} ]]; then
            sources=.
        elif [[ ! -d $1 ]]; then
            echo "stub cmake: directory not found: '$1'" >&2
            return 1
        else
            sources=$1
        fi

        # Report $sources, not $1, which is empty in the default case
        if [[ ! -f $sources/CMakeLists.txt ]]; then
            echo "stub cmake: no CMakeLists.txt in '$sources'" >&2
            return 1
        fi

        target=foo
        # Recipe lines below carry the hard tabs make requires
        cat >Makefile <<EOF
all:
	echo $target > $target

install:
	cp $target \$(DESTDIR)/$target
EOF
    fi
}

special_packaging_cmake_ts_set_up() {
    # Install the sb2 stub plus an executable cmake stub script that embeds
    # special_packaging_cmake_stub and forwards its arguments to it.
    sb2_stub_create || return

    {
        echo '#!/bin/bash'
        declare -f special_packaging_cmake_stub
        echo 'special_packaging_cmake_stub "$@"'
    } >"$STUBS_DIR/cmake" || return
    chmod +x "$STUBS_DIR/cmake" || return
}

special_packaging_cmake_ts_tear_down() {
    # Reverse of special_packaging_cmake_ts_set_up.
    sb2_stub_remove || return
    rm -f "$STUBS_DIR/cmake"
}

# Runs one CMake-compatibility test case. Accepts 5 arguments described
# inline below. Relies on the surrounding harness for $SELF, $TC_NAME and
# the stubbed 'cmake' installed by special_packaging_cmake_ts_set_up.
special_packaging_cmake_tc() {
    # SUBDIR: Put sources under subdirectory
    #   - N: No
    #   - Y: Yes, pass it to the %setup macro
    #   - NOSET: Yes, do not pass it to the %setup macro
    local maybe_subdir=$([[ $1 == Y || $1 == NOSET ]] && echo test)
    local maybe_setupdir=$([[ $1 == Y ]] && echo "/%{name}")

    # SHADOW BUILD: Do shadow build
    #   - Y/N
    local enable_shadow=$([[ $2 == Y ]] && echo 1)

    # PROJECT IN SPEC: Pass project directory to cmake call in the .spec file
    #   - Y/N
    local maybe_project_in_spec=
    if [[ $3 == Y ]]; then
        # When sources are flat, or %setup already enters the subdirectory,
        # the project dir as seen during %build is "."; only in the NOSET
        # case must the subdirectory be named explicitly.
        if [[ ! $maybe_subdir || $maybe_setupdir ]]; then
            maybe_project_in_spec=.
        else
            maybe_project_in_spec=$maybe_subdir
        fi
    fi

    # PROJECT ON CMDLN: Pass project directory on command line
    #   - Y/N
    local project_on_cmdln=$([[ $4 == Y ]] && echo ${maybe_subdir:-.})

    # CMAKE BUILD: Use "cmake --build" instead of raw "make" in the .spec file
    #   - Y/N
    local use_cmake_build=$([[ $5 == Y ]] && echo 1)

    local srcdir=test
    local builddir=test

    if [[ $enable_shadow ]]; then
        # Shadow build: sources stay in ./test, building happens in
        # ./test-build, so the project dir is reached via "../test"
        if [[ $project_on_cmdln ]]; then
            project_on_cmdln=/$project_on_cmdln
        fi
        project_on_cmdln=../test$project_on_cmdln
        builddir=test-build
    fi

    mkdir -p "$srcdir" || return
    mkdir -p "$builddir" || return

    # Minimal spec file; the unquoted END delimiter lets the test-case
    # parameters (maybe_setupdir etc.) expand into the spec sections
    mkdir "$srcdir/rpm" || return
    cat >"$srcdir/rpm/test.spec" <<END || return
Name:       test

# These would normally come from the environment
%{!?cmake:%define cmake cmake}

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

# Make it noop
%define setup echo

Summary:    Test
Version:    0.1
Release:    1
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2

%description
Test

%prep
%setup -n %{name}-%{version}${maybe_setupdir}

%build
%cmake ${maybe_project_in_spec}
if [[ -n '${use_cmake_build}' ]]; then
    cmake --build .
else
    make %{?_smp_mflags}
fi

%install
mkdir -p %{buildroot}
make DESTDIR=%{buildroot} install

%files
%defattr(-,root,root,-)
/foo
END

    mkdir -p "$srcdir/$maybe_subdir" || return
    echo > "$srcdir/$maybe_subdir/CMakeLists.txt" || return

    # Dump the prepared tree to aid debugging of failed runs
    find -ls >&2

    # Exercise three flows: plain "build", then "cmake/cmake --build/package",
    # then "cmake/make/package", cleaning build artifacts between them
    (
        (
            cd "$builddir" || return
            echo "--- Running build" >&2
            set -x
            "$SELF" build ${project_on_cmdln:+"$project_on_cmdln"} || return
        ) || return

        # NOTE(review): $buildroot is expected to come from the surrounding
        # harness — confirm it is always set when this test case runs
        rm -rf "$builddir"/{.mb2,RPMS,Makefile,foo} "$buildroot" || return

        (
            cd "$builddir" || return
            echo "--- Running cmake/cmake --build/package" >&2
            set -x
            "$SELF" cmake ${project_on_cmdln:+"$project_on_cmdln"} || return
            "$SELF" cmake --build . || return
            "$SELF" package || return
        ) || return

        rm -rf "$builddir"/{.mb2,RPMS,Makefile,foo} "$buildroot" || return

        (
            cd "$builddir" || return
            echo "--- Running cmake/make/package" >&2
            set -x
            "$SELF" cmake ${project_on_cmdln:+"$project_on_cmdln"} || return
            "$SELF" make || return
            "$SELF" package || return
        ) || return
    )

    if [[ $? -ne 0 ]]; then
        cat <<END
Test case failed: $TC_NAME
END
        return 1
    fi

    return 0
}

special_packaging_cmake_tc_tear_down() {
    # Remove everything special_packaging_cmake_tc may have left behind
    local leftover
    for leftover in "$buildroot" ./test ./test-build; do
        rm -rf "$leftover" || return
    done
}

# Test matrix: each run_tc line supplies the five flags documented in
# special_packaging_cmake_tc (SUBDIR, SHADOW BUILD, PROJECT IN SPEC,
# PROJECT ON CMDLN, CMAKE BUILD).

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Flat package"
run_tc special_packaging_cmake "Basic" \
    N       N       N        N         N
run_tc special_packaging_cmake "Basic (shadow)" \
    N       Y       N        N         N
run_tc special_packaging_cmake "Pass project dir in spec" \
    N       N       Y        N         N
run_tc special_packaging_cmake "Pass project dir in spec (shadow)" \
    N       Y       Y        N         N
run_tc special_packaging_cmake "Pass project dir on cmdline" \
    N       N       N        Y         N
run_tc special_packaging_cmake "Pass project dir in spec and on cmdline (shadow)" \
    N       Y       Y        Y         N
tear_down_ts special_packaging_cmake

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Flat package / cmake --build"
run_tc special_packaging_cmake "Basic" \
    N       N       N        N         Y
run_tc special_packaging_cmake "Basic (shadow)" \
    N       Y       N        N         Y
run_tc special_packaging_cmake "Pass project dir in spec" \
    N       N       Y        N         Y
run_tc special_packaging_cmake "Pass project dir in spec (shadow)" \
    N       Y       Y        N         Y
run_tc special_packaging_cmake "Pass project dir on cmdline" \
    N       N       N        Y         Y
run_tc special_packaging_cmake "Pass project dir in spec and on cmdline (shadow)" \
    N       Y       Y        Y         Y
tear_down_ts special_packaging_cmake

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Subtree package"
run_tc special_packaging_cmake "Basic" \
    Y       N       N        N         N
run_tc special_packaging_cmake "Basic (shadow)" \
    Y       Y       N        N         N
run_tc special_packaging_cmake "Pass project dir" \
    Y       N       N        Y         N
run_tc special_packaging_cmake "Pass project dir (shadow)" \
    Y       Y       N        Y         N
tear_down_ts special_packaging_cmake

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Subtree package / cmake --build"
run_tc special_packaging_cmake "Basic" \
    Y       N       N        N         Y
run_tc special_packaging_cmake "Basic (shadow)" \
    Y       Y       N        N         Y
run_tc special_packaging_cmake "Pass project dir" \
    Y       N       N        Y         Y
run_tc special_packaging_cmake "Pass project dir (shadow)" \
    Y       Y       N        Y         Y
tear_down_ts special_packaging_cmake

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Subtree package / Without %setup subdir"
run_tc special_packaging_cmake "Basic" \
    NOSET   N       Y        N         N
run_tc special_packaging_cmake "Basic (shadow)" \
    NOSET   Y       Y        N         N
run_tc special_packaging_cmake "Pass project dir" \
    NOSET   N       Y        Y         N
run_tc special_packaging_cmake "Pass project dir (shadow)" \
    NOSET   Y       Y        Y         N
tear_down_ts special_packaging_cmake

#   SUBDIR  SHADOW  PROJECT  PROJECT   CMAKE
#           BUILD   IN SPEC  ON CMDLN  BUILD

set_up_ts special_packaging_cmake "Compatibility / CMake / Subtree package / Without %setup subdir / cmake --build"
run_tc special_packaging_cmake "Basic" \
    NOSET   N       Y        N         Y
run_tc special_packaging_cmake "Basic (shadow)" \
    NOSET   Y       Y        N         Y
run_tc special_packaging_cmake "Pass project dir" \
    NOSET   N       Y        Y         Y
run_tc special_packaging_cmake "Pass project dir (shadow)" \
    NOSET   Y       Y        Y         Y
tear_down_ts special_packaging_cmake

###############################################################################
# Test compatibility with various packaging options (Autotools based projects)

# Checks that 'configure.ac' exists and creates 'configure' from template.
# Checks that 'Makefile.am' exists and copies it as 'Makefile.in'.
special_packaging_autotools_autoreconf_stub() {
    # The real autoreconf is always invoked without arguments by these tests
    if [[ $# -ne 0 ]]; then
        echo "stub autoreconf: unexpected argument: '$1'" >&2
        return 1
    fi

    if [[ ! -e Makefile.am ]]; then
        echo "stub autoreconf: no Makefile.am under '$PWD'" >&2
        return 1
    fi

    # The test fixtures mark their files with known first lines so the stub
    # can verify it is looking at the expected inputs
    if [[ $(head -n1 Makefile.am) != "# bogus" ]]; then
        echo "stub autoreconf: unexpected content in Makefile.am under '$PWD'" >&2
        return 1
    fi

    cp Makefile.am Makefile.in || return

    if [[ ! -e configure.ac ]]; then
        echo "stub autoreconf: no configure.ac under '$PWD'" >&2
        return 1
    fi

    if [[ $(<configure.ac) != "dnl bogus" ]]; then
        echo "stub autoreconf: unexpected content in configure.ac under '$PWD'" >&2
        return 1
    fi

    # Creates 'Makefile' from 'Makefile.in', replacing @SRCDIR@ and @OUTDIR@.
    # Defined here (nested) so that 'declare -f' below can serialize it into
    # the generated 'configure' script; $0 is the configure script's own path,
    # hence dirname "$0" is the source directory.
    special_packaging_autotools_configure_template() {
        local srcdir=$(dirname "$0")
        if [[ ! -f $srcdir/Makefile.in ]]; then
            echo "$0: no Makefile.in under '$srcdir'" >&2
            return 1
        fi
        sed < "$srcdir/Makefile.in" > ./Makefile \
            -e "s,@SRCDIR@,$srcdir,g" \
            -e "s,@OUTDIR@,$PWD,g"
    }

    # Cannot do '|| return' here. BASH would produce an invalid dump of
    # special_packaging_autotools_autoreconf_stub with 'declare -f', putting
    # 'return' on a separate line after 'END'
    cat <<END > configure #|| return
#!/bin/bash
$(declare -f special_packaging_autotools_configure_template)
special_packaging_autotools_configure_template "\$@"
END
    chmod +x configure || return
}

special_packaging_autotools_ts_set_up() {
    sb2_stub_create || return

    # Install an executable 'autoreconf' stub that embeds the stub function's
    # definition (serialized with 'declare -f') and forwards its arguments.
    local stub=$STUBS_DIR/autoreconf
    {
        echo '#!/bin/bash'
        declare -f special_packaging_autotools_autoreconf_stub
        echo 'special_packaging_autotools_autoreconf_stub "$@"'
    } >"$stub" || return
    chmod +x "$stub" || return
}

special_packaging_autotools_ts_tear_down() {
    sb2_stub_remove || return
    # Fix: the stub installed by the set-up is named "autoreconf"; the
    # previous path ("$STUBS_DIR/autotools") never existed, so the stub
    # was silently left behind after the suite finished.
    rm -f "$STUBS_DIR/autoreconf" || return
}

# Runs one Autotools-compatibility test case. Accepts 2 arguments described
# inline below. Relies on the surrounding harness for $SELF, $TC_NAME and
# the stubbed 'autoreconf' installed by special_packaging_autotools_ts_set_up.
special_packaging_autotools_tc() {
    # SUBDIR: Put sources under subdirectory
    #   - N: No
    #   - Y: Yes
    local maybe_subdir=$([[ $1 == Y ]] && echo test)
    local maybe_setupdir=$([[ $1 == Y ]] && echo "/%{name}")

    # SHADOW BUILD: Do shadow build
    #   - Y/N
    local enable_shadow=$([[ $2 == Y ]] && echo 1)

    local srcdir=test
    local maybe_srcdir_on_cmdln=
    local builddir=test

    if [[ $enable_shadow ]]; then
        # Shadow build: sources stay in ./test, building happens in
        # ./test-build, so the source dir is reached via "../test"
        maybe_srcdir_on_cmdln=../test
        builddir=test-build
    fi

    mkdir -p "$srcdir" || return
    mkdir -p "$builddir" || return

    # Minimal spec file; the unquoted END delimiter lets maybe_setupdir
    # expand into the %prep section
    mkdir "$srcdir/rpm" || return
    cat >"$srcdir/rpm/test.spec" <<END || return
Name:       test

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

# Make it noop
%define setup echo

Summary:    Test
Version:    0.1
Release:    1
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2

%description
Test

%prep
%setup -n %{name}-%{version}${maybe_setupdir}

%build
./autogen.sh
./configure
make

%install
mkdir -p %{buildroot}
make DESTDIR=%{buildroot} install

%files
%defattr(-,root,root,-)
/foo
END

    mkdir -p "$srcdir/$maybe_subdir" || return

    echo foo > "$srcdir/$maybe_subdir/foo.src" || return

    # Fixture files carry the "# bogus"/"dnl bogus" markers that the
    # autoreconf stub verifies before processing them
    cat <<END > "$srcdir/$maybe_subdir/Makefile.am" || return
# bogus
all:
	cat "@SRCDIR@/foo.src" > "@OUTDIR@/foo"

install:
	cp "@OUTDIR@/foo" \$(DESTDIR)/foo
END

    cat <<END > "$srcdir/$maybe_subdir/configure.ac" || return
dnl bogus
END

    cat <<END > "$srcdir/$maybe_subdir/autogen.sh" || return
#!/bin/bash
autoreconf
END
    chmod +x "$srcdir/$maybe_subdir/autogen.sh" || return

    # Dump the prepared tree to aid debugging of failed runs
    find -ls >&2

    (
        cd "$builddir" || return

        (
            echo "--- Running build" >&2
            set -x
            "$SELF" build ${maybe_srcdir_on_cmdln:+"$maybe_srcdir_on_cmdln"} || return
        ) || return

        (
            echo "--- Running build-shell-make/package" >&2
            set -x
            "$SELF" build-shell make ${maybe_subdir:+-C "$maybe_subdir"} >&2 || return
            "$SELF" package || return
        ) || return
    )

    if [[ $? -ne 0 ]]; then
        cat <<END
Test case failed: $TC_NAME
END
        return 1
    fi

    return 0
}

special_packaging_autotools_tc_tear_down() {
    # Remove everything special_packaging_autotools_tc may have left behind
    local leftover
    for leftover in "$buildroot" ./test ./test-build; do
        rm -rf "$leftover" || return
    done
}

# Test matrix: each run_tc line supplies the two flags documented in
# special_packaging_autotools_tc (SUBDIR, SHADOW BUILD).

#   SUBDIR  SHADOW
#           BUILD

set_up_ts special_packaging_autotools "Compatibility / Autotools"
run_tc special_packaging_autotools "Flat package" \
    N       N
run_tc special_packaging_autotools "Flat package (shadow)" \
    N       Y
run_tc special_packaging_autotools "Subtree package" \
    Y       N
run_tc special_packaging_autotools "Subtree package (shadow)" \
    Y       Y
tear_down_ts special_packaging_autotools

################################################################################
# Test the match_multi function

match_multi_ts_set_up() {
    # No suite-level preparation is needed for the match_multi tests
    true
}

match_multi_ts_tear_down() {
    # No suite-level cleanup is needed for the match_multi tests
    true
}

# $1 - a name that must match, $2 - a name that must not match,
# remaining arguments - the patterns passed to match_multi.
# Either name may be empty to skip that half of the check.
match_multi_tc() {
    local should_match=$1 should_not_match=$2 patterns=("${@:3}")

    local status=0 failures=()

    if [[ $should_match ]]; then
        if ! match_multi "$should_match" ${patterns:+"${patterns[@]}"}; then
            status=1
            failures+=("Not matched while it should")
        fi
    fi

    if [[ $should_not_match ]]; then
        if match_multi "$should_not_match" ${patterns:+"${patterns[@]}"}; then
            status=1
            failures+=("Matched while it should not")
        fi
    fi

    if [[ $status != 0 ]]; then
        cat <<END
Test case failed: $TC_NAME
END
        printf '    %s\n' "${failures[@]}"
    fi

    return $status
}

# Each run passes: a name expected to match, a name expected not to match,
# then the pattern list. NOTE(review): based on the test names, a leading
# '-' appears to exclude, '+' (or no prefix) to include, with later patterns
# able to override earlier ones — confirm against match_multi itself.
set_up_ts match_multi "Test match_multi"
run_tc match_multi "No filter" foobar ""
run_tc match_multi "Implicit inclusion only" foobar doobar "foo*"
run_tc match_multi "Explicit inclusion only" foobar doobar "+foo*"
run_tc match_multi "Explicit exclusion only" foobar doobar "-doo*"
run_tc match_multi "Override exclusion" foobar doobar "-*bar" "foo*"
run_tc match_multi "Override exclusion - keep not excluded" bazbaz doobar "-*bar" "foo*"
run_tc match_multi "Override inclusion" foobar doobar "*bar" "-doo*"
run_tc match_multi "Override exclusion later" foobar doobar "-*bar" "zoo*" "foo*"
run_tc match_multi "Override exclusion later - keep not excluded" bazbaz doobar "-*bar" "zoo*" "foo*"
run_tc match_multi "Override inclusion later" foobar doobar "*bar" "zoo*" "-doo*"
run_tc match_multi "Extglob" foobar doobar "foo@(bar|baz)"
tear_down_ts match_multi

################################################################################
# Test find_latest_rpms

# Builds a throw-away package into ./RPMS using "$SELF build".
# $1 - package name, $2 - version, $3 - release.
make_test_package() {
    local name=$1 version=$2 release=$3

    # Create a minimal spec file
    mkdir -p rpm || return
    cat >rpm/test.spec <<END || return
Name:       ${name}

# This is to not require meego-rpm-config installed for self-test execution
%global __os_install_post %{nil}

Summary:    Test
Version:    ${version}
Release:    ${release}
Group:      Qt/Qt
License:    LICENSE
URL:        http://example.org/
Source0:    %{name}-%{version}.tar.bz2

%description
Test

%build

%install
mkdir -p %{buildroot}
touch %{buildroot}/foo

%files
%defattr(-,root,root,-)
/foo
END
    # Set output-dir explicitly to prevent removal of previously built packages
    mkdir -p RPMS || return
    "$SELF" --output-dir RPMS build >&2 || return
}

# Verifies that find_latest_rpms picks only the highest version of each
# package name from the RPMS directory.
find_latest_rpms_tc() {
    sb2_stub_create || return

    # Build several versions of two packages; only the newest of each
    # should be reported below
    make_test_package "test1" "1.2.3" "1" || return
    make_test_package "test1" "1.2.3+jb12345.20200202202020.deadbeef" "1" || return
    make_test_package "test1" "1.2.3+jb12345.20200222222222.beefbabe" "1" || return
    make_test_package "test2" "3.2.1" "1" || return
    make_test_package "test2" "3.3" "1" || return

    local expected=$(cat <<END
RPMS/test1-1.2.3+jb12345.20200222222222.beefbabe-1.i486.rpm
RPMS/test2-3.3-1.i486.rpm
END
)
    local actual=
    # NOTE(review): nounset is relaxed here, presumably because
    # find_latest_rpms references variables that may be unset — confirm
    actual=$(set +o nounset; find_latest_rpms "RPMS") || return

    if [[ $actual != "$expected" ]]; then
        cat <<END
Test case failed: $TC_NAME
  ** Expected   ** [[
$expected
]]
  ** Actual     ** [[
$actual
]]
END
        return 1
    fi

    return 0
}

find_latest_rpms_tc_tear_down() {
    sb2_stub_remove || return

    # Drop the generated spec file and its directory
    rm -f rpm/test.spec || return
    rmdir rpm || return

    # Remove every package built by find_latest_rpms_tc
    local built
    for built in \
        "test1-1.2.3-1" \
        "test1-1.2.3+jb12345.20200202202020.deadbeef-1" \
        "test1-1.2.3+jb12345.20200222222222.beefbabe-1" \
        "test2-3.2.1-1" \
        "test2-3.3-1"
    do
        rm -f "RPMS/$built.i486.rpm" || return
    done

    if [[ -d RPMS ]]; then
        rmdir RPMS || return
    fi
    rm -f "$buildroot/foo" || return
}

# This suite has only a tc-level tear-down (find_latest_rpms_tc_tear_down),
# so run_tc is invoked without set_up_ts/tear_down_ts around it
run_tc find_latest_rpms "Test find_latest_rpms"

################################################################################

# Print the final summary. $tc_num and $tc_failed_num are maintained by the
# run_tc harness.
if [[ $tc_failed_num -eq 0 ]]; then
    echo "*** All $tc_num tests passed"
else
    echo "*** $tc_failed_num out of $tc_num failed"
    # Fix: propagate failure to the caller (CI etc.) — previously the
    # script exited with status 0 even when test cases had failed.
    exit 1
fi

# For Emacs:
# Local Variables:
# indent-tabs-mode:nil
# tab-width:4
# mode: sh
# End:
# For VIM:
# vim:set softtabstop=4 shiftwidth=4 tabstop=4 expandtab:
