aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/main.yml2
-rw-r--r--.gitlab-ci.yml1
-rw-r--r--Documentation/RelNotes/2.50.0.adoc68
-rw-r--r--Documentation/config/maintenance.adoc17
-rw-r--r--Documentation/git-branch.adoc2
-rw-r--r--Documentation/git-cat-file.adoc6
-rw-r--r--Documentation/git-maintenance.adoc8
-rw-r--r--Documentation/git-reflog.adoc2
-rw-r--r--Documentation/git-send-email.adoc80
-rw-r--r--Documentation/gitcredentials.adoc4
-rw-r--r--Documentation/scalar.adoc31
-rw-r--r--Documentation/technical/bundle-uri.adoc14
-rw-r--r--Makefile4
-rw-r--r--builtin/am.c4
-rw-r--r--builtin/cat-file.c34
-rw-r--r--builtin/clone.c4
-rw-r--r--builtin/count-objects.c2
-rw-r--r--builtin/fast-import.c3
-rw-r--r--builtin/fetch.c15
-rw-r--r--builtin/fsck.c13
-rw-r--r--builtin/gc.c150
-rw-r--r--builtin/hash-object.c69
-rw-r--r--builtin/index-pack.c65
-rw-r--r--builtin/receive-pack.c4
-rw-r--r--builtin/remote.c3
-rw-r--r--builtin/replay.c65
-rw-r--r--builtin/rev-list.c2
-rw-r--r--builtin/show-ref.c3
-rw-r--r--builtin/unpack-objects.c3
-rw-r--r--bulk-checkin.c3
-rw-r--r--bundle-uri.c2
-rw-r--r--cache-tree.c13
-rwxr-xr-xci/install-dependencies.sh21
-rwxr-xr-xci/run-build-and-tests.sh6
-rwxr-xr-xci/run-test-slice.sh5
-rw-r--r--commit-graph.c4
-rw-r--r--compiler-tricks/not-constant.c2
-rw-r--r--config.mak.uname76
-rw-r--r--contrib/buildsystems/Generators.pm42
-rw-r--r--contrib/buildsystems/Generators/QMake.pm189
-rw-r--r--contrib/buildsystems/Generators/Vcproj.pm579
-rw-r--r--contrib/buildsystems/Generators/Vcxproj.pm402
-rwxr-xr-xcontrib/buildsystems/engine.pl395
-rwxr-xr-xcontrib/buildsystems/generate29
-rwxr-xr-xcontrib/buildsystems/parse.pl228
-rw-r--r--convert.c2
-rw-r--r--diffcore-rename.c2
-rw-r--r--dir.c5
-rw-r--r--fetch-pack.c7
-rwxr-xr-xgit-send-email.perl53
-rw-r--r--http-push.c11
-rw-r--r--http-walker.c9
-rw-r--r--http.c4
-rw-r--r--json-writer.c4
-rw-r--r--json-writer.h171
-rw-r--r--list-objects-filter.c2
-rw-r--r--list-objects.c3
-rw-r--r--log-tree.c2
-rw-r--r--mailinfo.c42
-rw-r--r--meson.build21
-rw-r--r--meson_options.txt4
-rw-r--r--notes.c3
-rw-r--r--object-file.c146
-rw-r--r--object-file.h98
-rw-r--r--object-name.c2
-rw-r--r--object-store.c64
-rw-r--r--object-store.h201
-rw-r--r--oidmap.c2
-rw-r--r--oidmap.h9
-rw-r--r--pack-bitmap.c8
-rw-r--r--pack-objects.h1
-rw-r--r--packfile.c7
-rw-r--r--packfile.h78
-rw-r--r--path.c14
-rw-r--r--path.h7
-rw-r--r--perl/FromCPAN/Mail/meson.build2
-rw-r--r--perl/FromCPAN/meson.build2
-rw-r--r--perl/Git/LoadCPAN/Mail/meson.build2
-rw-r--r--perl/Git/LoadCPAN/meson.build2
-rw-r--r--perl/Git/SVN/Memoize/meson.build2
-rw-r--r--perl/Git/SVN/meson.build2
-rw-r--r--perl/Git/meson.build2
-rw-r--r--perl/meson.build2
-rw-r--r--prune-packed.c2
-rw-r--r--reachable.c2
-rw-r--r--reflog.c3
-rw-r--r--refs.c2
-rw-r--r--reftable/block.c7
-rw-r--r--reftable/reftable-block.h3
-rw-r--r--reftable/table.c11
-rw-r--r--reftable/writer.c8
-rw-r--r--remote.c2
-rw-r--r--replace-object.c8
-rw-r--r--replace-object.h2
-rw-r--r--scalar.c65
-rw-r--r--send-pack.c5
-rw-r--r--sequencer.c128
-rw-r--r--shallow.c9
-rw-r--r--streaming.c2
-rw-r--r--t/helper/meson.build2
-rw-r--r--t/helper/test-pack-deltas.c148
-rw-r--r--t/helper/test-tool.c2
-rw-r--r--t/helper/test-tool.h2
-rw-r--r--t/helper/test-zlib.c62
-rw-r--r--t/lib-loose.sh30
-rw-r--r--t/meson.build2
-rwxr-xr-xt/t1001-read-tree-m-2way.sh2
-rwxr-xr-xt/t1006-cat-file.sh216
-rwxr-xr-xt/t1007-hash-object.sh11
-rwxr-xr-xt/t1450-fsck.sh32
-rwxr-xr-xt/t1512-rev-parse-disambiguation.sh5
-rwxr-xr-xt/t3430-rebase-merges.sh11
-rw-r--r--t/t4018/bash-bashism-style-complete-line-capture4
-rw-r--r--t/t4018/bash-posix-style-complete-line-capture4
-rw-r--r--t/t4018/bash-posix-style-single-command-function3
-rwxr-xr-xt/t4034-diff-words.sh1
-rw-r--r--t/t4034/bash/expect36
-rw-r--r--t/t4034/bash/post31
-rw-r--r--t/t4034/bash/pre31
-rwxr-xr-xt/t4071-diff-minimal.sh14
-rwxr-xr-xt/t5309-pack-delta-cycles.sh34
-rwxr-xr-xt/t5558-clone-bundle-uri.sh202
-rwxr-xr-xt/t6011-rev-list-with-bad-commit.sh1
-rwxr-xr-xt/t6137-pathspec-wildcards-literal.sh429
-rwxr-xr-xt/t7900-maintenance.sh115
-rwxr-xr-xt/t9210-scalar.sh26
-rwxr-xr-xt/t9211-scalar-clone.sh11
-rw-r--r--t/unit-tests/t-reftable-block.c15
-rw-r--r--t/unit-tests/u-oidmap.c2
-rw-r--r--tree-walk.h13
-rw-r--r--upload-pack.c3
-rw-r--r--userdiff.c26
-rw-r--r--walker.c3
-rw-r--r--wrapper.c21
-rw-r--r--xdiff/xprepare.c5
135 files changed, 2408 insertions, 3078 deletions
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 49bcd0a2a9..7dbf9f7f12 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -265,7 +265,7 @@ jobs:
run: pip install meson ninja
- name: Setup
shell: pwsh
- run: meson setup build --vsenv -Dperl=disabled -Dcredential_helpers=wincred
+ run: meson setup build --vsenv -Dbuildtype=release -Dperl=disabled -Dcredential_helpers=wincred
- name: Compile
shell: pwsh
run: meson compile -C build
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4798b28374..bb6d5b976c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -173,7 +173,6 @@ build:msvc-meson:
test:msvc-meson:
extends: .msvc-meson
stage: test
- when: manual
timeout: 6h
needs:
- job: "build:msvc-meson"
diff --git a/Documentation/RelNotes/2.50.0.adoc b/Documentation/RelNotes/2.50.0.adoc
index 07759cf98b..f721ea350d 100644
--- a/Documentation/RelNotes/2.50.0.adoc
+++ b/Documentation/RelNotes/2.50.0.adoc
@@ -53,6 +53,29 @@ UI, Workflows & Features
* The build procedure installs bash (but not zsh) completion script.
+ * send-email has been updated to work better with Outlook's smtp server.
+
+ * "git diff --minimal" used to give non-minimal output when its
+ optimization kicked in, which has been disabled.
+
+ * "git index-pack --fix-thin" used to abort to prevent a cycle in
+ delta chains from forming in a corner case even when there is no
+ such cycle.
+
+ * Make repository clean-up tasks "gc" can do available to "git
+ maintenance" front-end.
+
+ * Bundle-URI feature did not use refs recorded in the bundle other
+ than normal branches as anchoring points to optimize the follow-up
+ fetch during "git clone"; now it is told to utilize all.
+
+ * The `send-email` documentation has been updated with OAuth2.0
+ related examples.
+
+ * Two of the "scalar" subcommands that add a repository that hasn't
+ been under "scalar"'s control are taught an option not to enable the
+ scheduled maintenance on it.
+
Performance, Internal Implementation, Development Support etc.
--------------------------------------------------------------
@@ -134,6 +157,16 @@ Performance, Internal Implementation, Development Support etc.
* Add an equivalent to "make hdr-check" target to meson based builds.
+ * Further code clean-up in the object-store layer.
+
+ * Build performance fix.
+
+ * Teach "git send-email" to also consult `hostname -f` for mail
+ domain to compute the identity given to SMTP servers.
+
+ * The dependency on the_repository variable has been reduced from the
+ code paths in "git replay".
+
Fixes since v2.49
-----------------
@@ -261,6 +294,37 @@ Fixes since v2.49
now detected and the command errors out.
(merge 974f0d4664 ps/mv-contradiction-fix later to maint).
+ * Further refinement on CI messages when an optional external
+ software is unavailable (e.g. due to third-party service outage).
+ (merge 956acbefbd jc/ci-skip-unavailable-external-software later to maint).
+
+ * Test result aggregation did not work in Meson based CI jobs.
+ (merge bd38ed5be1 ps/ci-test-aggreg-fix-for-meson later to maint).
+
+ * Code clean-up around stale CI elements and building with Visual Studio.
+ (merge a7b060f67f js/ci-buildsystems-cleanup later to maint).
+
+ * "git add 'f?o'" did not add 'foo' if 'f?o', an unusual pathname,
+ also existed on the working tree, which has been corrected.
+ (merge ec727e189c kj/glob-path-with-special-char later to maint).
+
+ * The fallback implementation of open_nofollow() depended on
+ open("symlink", O_NOFOLLOW) to set errno to ELOOP, but a few BSD
+ derived systems use different errno, which has been worked around.
+ (merge f47bcc3413 cf/wrapper-bsd-eloop later to maint).
+
+ * Use-after-free fix in the sequencer.
+ (merge 5dbaec628d pw/sequencer-reflog-use-after-free later to maint).
+
+ * win+Meson CI pipeline, unlike other pipelines for Windows,
+ used to build artifacts in developer mode, which has been changed to
+ build them in release mode for consistency.
+ (merge 184abdcf05 js/ci-build-win-in-release-mode later to maint).
+
+ * CI settings at GitLab have been updated to run the MSVC based Meson job
+ automatically (as opposed to being run only upon manual request).
+ (merge 6389579b2f ps/ci-gitlab-enable-msvc-meson-job later to maint).
+
* Other code cleanup, docfix, build fix, etc.
(merge 227c4f33a0 ja/doc-block-delimiter-markup-fix later to maint).
(merge 2bfd3b3685 ab/decorate-code-cleanup later to maint).
@@ -287,3 +351,7 @@ Fixes since v2.49
(merge 1aa50636fd jk/p5332-testfix later to maint).
(merge 42cf4ac552 ps/ci-resurrect-p4-on-github later to maint).
(merge 104add8368 js/diff-codeql-false-positive-workaround later to maint).
+ (merge f62977b93c en/get-tree-entry-doc later to maint).
+ (merge e5dd0a05ed ly/am-split-stgit-leakfix later to maint).
+ (merge bac220e154 rc/t1001-test-path-is-file later to maint).
+ (merge 91db6c735d ly/reftable-writer-leakfix later to maint).
diff --git a/Documentation/config/maintenance.adoc b/Documentation/config/maintenance.adoc
index 41536162a7..2f71934218 100644
--- a/Documentation/config/maintenance.adoc
+++ b/Documentation/config/maintenance.adoc
@@ -83,3 +83,20 @@ maintenance.reflog-expire.auto::
positive value implies the command should run when the number of
expired reflog entries in the "HEAD" reflog is at least the value of
`maintenance.loose-objects.auto`. The default value is 100.
+
+maintenance.rerere-gc.auto::
+ This integer config option controls how often the `rerere-gc` task
+ should be run as part of `git maintenance run --auto`. If zero, then
+ the `rerere-gc` task will not run with the `--auto` option. A negative
+ value will force the task to run every time. Otherwise, any positive
+ value implies the command will run when the "rr-cache" directory exists
+ and has at least one entry, regardless of whether it is stale or not.
+ This heuristic may be refined in the future. The default value is 1.
+
+maintenance.worktree-prune.auto::
+ This integer config option controls how often the `worktree-prune` task
+ should be run as part of `git maintenance run --auto`. If zero, then
+ the `worktree-prune` task will not run with the `--auto` option. A
+ negative value will force the task to run every time. Otherwise, a
+ positive value implies the command should run when the number of
+ prunable worktrees exceeds the value. The default value is 1.
diff --git a/Documentation/git-branch.adoc b/Documentation/git-branch.adoc
index 50a1e13e1f..c0afddc424 100644
--- a/Documentation/git-branch.adoc
+++ b/Documentation/git-branch.adoc
@@ -373,7 +373,7 @@ $ git branch -D test <2>
------------
+
<1> Delete the remote-tracking branches "todo", "html" and "man". The next
- `git fetch` or `git pullè will create them again unless you configure them not to.
+ `git fetch` or `git pull` will create them again unless you configure them not to.
See linkgit:git-fetch[1].
<2> Delete the "test" branch even if the "master" branch (or whichever branch
is currently checked out) does not have all commits from the test branch.
diff --git a/Documentation/git-cat-file.adoc b/Documentation/git-cat-file.adoc
index fc4b92f104..cde79ad242 100644
--- a/Documentation/git-cat-file.adoc
+++ b/Documentation/git-cat-file.adoc
@@ -9,8 +9,7 @@ SYNOPSIS
--------
[verse]
'git cat-file' <type> <object>
-'git cat-file' (-e | -p) <object>
-'git cat-file' (-t | -s) [--allow-unknown-type] <object>
+'git cat-file' (-e | -p | -t | -s) <object>
'git cat-file' (--textconv | --filters)
[<rev>:<path|tree-ish> | --path=<path|tree-ish> <rev>]
'git cat-file' (--batch | --batch-check | --batch-command) [--batch-all-objects]
@@ -202,9 +201,6 @@ flush::
only once, even if it is stored multiple times in the
repository.
---allow-unknown-type::
- Allow `-s` or `-t` to query broken/corrupt objects of unknown type.
-
--follow-symlinks::
With `--batch` or `--batch-check`, follow symlinks inside the
repository when requesting objects with extended SHA-1
diff --git a/Documentation/git-maintenance.adoc b/Documentation/git-maintenance.adoc
index 3a1e2a69b6..931f3e02e8 100644
--- a/Documentation/git-maintenance.adoc
+++ b/Documentation/git-maintenance.adoc
@@ -166,6 +166,14 @@ reflog-expire::
The `reflog-expire` task deletes any entries in the reflog older than the
expiry threshold. See linkgit:git-reflog[1] for more information.
+rerere-gc::
+ The `rerere-gc` task invokes garbage collection for stale entries in
+ the rerere cache. See linkgit:git-rerere[1] for more information.
+
+worktree-prune::
+ The `worktree-prune` task deletes stale or broken worktrees. See
+ linkgit:git-worktree[1] for more information.
+
OPTIONS
-------
--auto::
diff --git a/Documentation/git-reflog.adoc b/Documentation/git-reflog.adoc
index b55c060569..412f06b8fe 100644
--- a/Documentation/git-reflog.adoc
+++ b/Documentation/git-reflog.adoc
@@ -138,7 +138,7 @@ Options for `delete`
used with `expire`.
Options for `drop`
-~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~
--all::
Drop the reflogs of all references from all worktrees.
diff --git a/Documentation/git-send-email.adoc b/Documentation/git-send-email.adoc
index 7f223db42d..26fda63c2f 100644
--- a/Documentation/git-send-email.adoc
+++ b/Documentation/git-send-email.adoc
@@ -115,6 +115,19 @@ illustration below where `[PATCH v2 0/3]` is in reply to `[PATCH 0/2]`:
Only necessary if --compose is also set. If --compose
is not set, this will be prompted for.
+--[no-]outlook-id-fix::
+ Microsoft Outlook SMTP servers discard the Message-ID sent via email and
+ assign a new random Message-ID, thus breaking threads.
++
+With `--outlook-id-fix`, 'git send-email' uses a mechanism specific to
+Outlook servers to learn the Message-ID the server assigned to fix the
+threading. Use it only when you know that the server reports the
+rewritten Message-ID the same way as Outlook servers do.
++
+Without this option specified, the fix is done by default when talking
+to 'smtp.office365.com' or 'smtp-mail.outlook.com'. Use
+`--no-outlook-id-fix` to disable even when talking to these two servers.
+
--subject=<string>::
Specify the initial subject of the email thread.
Only necessary if --compose is also set. If --compose
@@ -496,12 +509,12 @@ include::includes/cmd-config-section-all.adoc[]
include::config/sendemail.adoc[]
-EXAMPLES
---------
-Use gmail as the smtp server
+EXAMPLES OF SMTP SERVERS
+------------------------
+Use Gmail as the SMTP Server
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-To use 'git send-email' to send your patches through the GMail SMTP server,
-edit ~/.gitconfig to specify your account settings:
+To use `git send-email` to send your patches through the Gmail SMTP server,
+edit `~/.gitconfig` to specify your account settings:
----
[sendemail]
@@ -515,6 +528,41 @@ If you have multi-factor authentication set up on your Gmail account, you can
generate an app-specific password for use with 'git send-email'. Visit
https://security.google.com/settings/security/apppasswords to create it.
+You can also use OAuth2.0 authentication with Gmail. `OAUTHBEARER` and
+`XOAUTH2` are common methods used for this type of authentication. Gmail
+supports both of them. As an example, if you want to use `OAUTHBEARER`, edit
+your `~/.gitconfig` file and add `smtpAuth = OAUTHBEARER` to your account
+settings:
+
+----
+[sendemail]
+ smtpEncryption = tls
+ smtpServer = smtp.gmail.com
+ smtpUser = yourname@gmail.com
+ smtpServerPort = 587
+ smtpAuth = OAUTHBEARER
+----
+
+Use Microsoft Outlook as the SMTP Server
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Unlike Gmail, Microsoft Outlook no longer supports app-specific passwords.
+Therefore, OAuth2.0 authentication must be used for Outlook. Also, it only
+supports `XOAUTH2` authentication method.
+
+Edit `~/.gitconfig` to specify your account settings for Outlook and use its
+SMTP server with `git send-email`:
+
+----
+[sendemail]
+ smtpEncryption = tls
+ smtpServer = smtp.office365.com
+ smtpUser = yourname@outlook.com
+ smtpServerPort = 587
+ smtpAuth = XOAUTH2
+----
+
+SENDING PATCHES
+---------------
Once your commits are ready to be sent to the mailing list, run the
following commands:
@@ -523,9 +571,25 @@ following commands:
$ git send-email outgoing/*
The first time you run it, you will be prompted for your credentials. Enter the
-app-specific or your regular password as appropriate. If you have credential
-helper configured (see linkgit:git-credential[1]), the password will be saved in
-the credential store so you won't have to type it the next time.
+app-specific or your regular password as appropriate.
+
+If you have a credential helper configured (see linkgit:git-credential[1]), the
+password will be saved in the credential store so you won't have to type it the
+next time.
+
+If you are using OAuth2.0 authentication, you need to use an access token in
+place of a password when prompted. Various OAuth2.0 token generators are
+available online. Community maintained credential helpers for Gmail and Outlook
+are also available:
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-gmail]
+ (cross platform, dedicated helper for authenticating Gmail accounts)
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-outlook]
+ (cross platform, dedicated helper for authenticating Microsoft Outlook accounts)
+
+You can also see linkgit:gitcredentials[7] for more OAuth based authentication
+helpers.
Note: the following core Perl modules that may be installed with your
distribution of Perl are required:
diff --git a/Documentation/gitcredentials.adoc b/Documentation/gitcredentials.adoc
index 3337bb475d..b49923db02 100644
--- a/Documentation/gitcredentials.adoc
+++ b/Documentation/gitcredentials.adoc
@@ -133,6 +133,10 @@ Popular helpers with OAuth support include:
- https://github.com/hickford/git-credential-oauth[git-credential-oauth] (cross platform, included in many Linux distributions)
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-gmail] (cross platform, dedicated helper to authenticate Gmail accounts for linkgit:git-send-email[1])
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-outlook] (cross platform, dedicated helper to authenticate Microsoft Outlook accounts for linkgit:git-send-email[1])
+
CREDENTIAL CONTEXTS
-------------------
diff --git a/Documentation/scalar.adoc b/Documentation/scalar.adoc
index 7e4259c674..4bd5b150e8 100644
--- a/Documentation/scalar.adoc
+++ b/Documentation/scalar.adoc
@@ -9,12 +9,12 @@ SYNOPSIS
--------
[verse]
scalar clone [--single-branch] [--branch <main-branch>] [--full-clone]
- [--[no-]src] <url> [<enlistment>]
+ [--[no-]src] [--[no-]tags] [--[no-]maintenance] <url> [<enlistment>]
scalar list
-scalar register [<enlistment>]
+scalar register [--[no-]maintenance] [<enlistment>]
scalar unregister [<enlistment>]
scalar run ( all | config | commit-graph | fetch | loose-objects | pack-files ) [<enlistment>]
-scalar reconfigure [ --all | <enlistment> ]
+scalar reconfigure [--maintenance=(enable|disable|keep)] [ --all | <enlistment> ]
scalar diagnose [<enlistment>]
scalar delete <enlistment>
@@ -97,6 +97,11 @@ cloning. If the HEAD at the remote did not point at any branch when
A sparse-checkout is initialized by default. This behavior can be
turned off via `--full-clone`.
+--[no-]maintenance::
+ By default, `scalar clone` configures the enlistment to use Git's
+ background maintenance feature. Use the `--no-maintenance` option to skip
+ this configuration.
+
List
~~~~
@@ -117,6 +122,12 @@ Note: when this subcommand is called in a worktree that is called `src/`, its
parent directory is considered to be the Scalar enlistment. If the worktree is
_not_ called `src/`, it itself will be considered to be the Scalar enlistment.
+--[no-]maintenance::
+ By default, `scalar register` configures the enlistment to use Git's
+ background maintenance feature. Use the `--no-maintenance` option to skip
+ this configuration. This does not disable any maintenance that may
+ already be enabled in other ways.
+
Unregister
~~~~~~~~~~
@@ -149,8 +160,18 @@ After a Scalar upgrade, or when the configuration of a Scalar enlistment
was somehow corrupted or changed by mistake, this subcommand allows to
reconfigure the enlistment.
-With the `--all` option, all enlistments currently registered with Scalar
-will be reconfigured. Use this option after each Scalar upgrade.
+--all::
+ When `--all` is specified, reconfigure all enlistments currently
+ registered with Scalar by the `scalar.repo` config key. Use this
+ option after each upgrade to get the latest features.
+
+--maintenance=(enable|disable|keep)::
+ By default, Scalar configures the enlistment to use Git's
+ background maintenance feature; this is the same as using the
+ `enable` value for this option. Use the `disable` value to
+ remove each considered enlistment from background maintenance.
+ Use `keep` to leave the background maintenance configuration
+ untouched for these repositories.
Diagnose
~~~~~~~~
diff --git a/Documentation/technical/bundle-uri.adoc b/Documentation/technical/bundle-uri.adoc
index 91d3a13e32..12283fa9ed 100644
--- a/Documentation/technical/bundle-uri.adoc
+++ b/Documentation/technical/bundle-uri.adoc
@@ -232,13 +232,13 @@ will interact with bundle URIs according to the following flow:
are present in the client repository. If some are missing, then the
client delays unbundling until other bundles have been unbundled,
making those OIDs present. When all required OIDs are present, the
- client unbundles that data using a refspec. The default refspec is
- `+refs/heads/*:refs/bundles/*`, but this can be configured. These refs
- are stored so that later `git fetch` negotiations can communicate each
- bundled ref as a `have`, reducing the size of the fetch over the Git
- protocol. To allow pruning refs from this ref namespace, Git may
- introduce a numbered namespace (such as `refs/bundles/<i>/*`) such that
- stale bundle refs can be deleted.
+ client unbundles that data using a refspec. The refspec used is
+ `+refs/*:refs/bundles/*`. These refs are stored so that later
+ `git fetch` negotiations can communicate each bundled ref as a `have`,
+ reducing the size of the fetch over the Git protocol. To allow pruning
+ refs from this ref namespace, Git may introduce a numbered namespace
+ (such as `refs/bundles/<i>/*`) such that stale bundle refs can be
+ deleted.
3. If the file is instead a bundle list, then the client inspects the
`bundle.mode` to see if the list is of the `all` or `any` form.
diff --git a/Makefile b/Makefile
index 8a7f1c7654..70d1543b6b 100644
--- a/Makefile
+++ b/Makefile
@@ -819,6 +819,7 @@ TEST_BUILTINS_OBJS += test-mergesort.o
TEST_BUILTINS_OBJS += test-mktemp.o
TEST_BUILTINS_OBJS += test-name-hash.o
TEST_BUILTINS_OBJS += test-online-cpus.o
+TEST_BUILTINS_OBJS += test-pack-deltas.o
TEST_BUILTINS_OBJS += test-pack-mtimes.o
TEST_BUILTINS_OBJS += test-parse-options.o
TEST_BUILTINS_OBJS += test-parse-pathspec-file.o
@@ -858,6 +859,7 @@ TEST_BUILTINS_OBJS += test-wildmatch.o
TEST_BUILTINS_OBJS += test-windows-named-pipe.o
TEST_BUILTINS_OBJS += test-write-cache.o
TEST_BUILTINS_OBJS += test-xml-encode.o
+TEST_BUILTINS_OBJS += test-zlib.o
# Do not add more tests here unless they have extra dependencies. Add
# them in TEST_BUILTINS_OBJS above.
@@ -2804,7 +2806,7 @@ endif
compdb_dir = compile_commands
ifeq ($(GENERATE_COMPILATION_DATABASE),yes)
-missing_compdb_dir = $(compdb_dir)
+missing_compdb_dir = $(filter-out $(wildcard $(compdb_dir)), $(compdb_dir))
$(missing_compdb_dir):
@mkdir -p $@
diff --git a/builtin/am.c b/builtin/am.c
index 4afb519830..e32a3b4c97 100644
--- a/builtin/am.c
+++ b/builtin/am.c
@@ -850,8 +850,10 @@ static int split_mail_stgit_series(struct am_state *state, const char **paths,
series_dir = dirname(series_dir_buf);
fp = fopen(*paths, "r");
- if (!fp)
+ if (!fp) {
+ free(series_dir_buf);
return error_errno(_("could not open '%s' for reading"), *paths);
+ }
while (!strbuf_getline_lf(&sb, fp)) {
if (*sb.buf == '#')
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index 0e3f10a946..67a5ff2b9e 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -100,8 +100,7 @@ static int stream_blob(const struct object_id *oid)
return 0;
}
-static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
- int unknown_type)
+static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
{
int ret;
struct object_id oid;
@@ -110,7 +109,6 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
unsigned long size;
struct object_context obj_context = {0};
struct object_info oi = OBJECT_INFO_INIT;
- struct strbuf sb = STRBUF_INIT;
unsigned flags = OBJECT_INFO_LOOKUP_REPLACE;
unsigned get_oid_flags =
GET_OID_RECORD_PATH |
@@ -121,9 +119,6 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
if (!path && opt_cw)
get_oid_flags |= GET_OID_REQUIRE_PATH;
- if (unknown_type)
- flags |= OBJECT_INFO_ALLOW_UNKNOWN_TYPE;
-
if (get_oid_with_context(the_repository, obj_name, get_oid_flags, &oid,
&obj_context))
die("Not a valid object name %s", obj_name);
@@ -136,16 +131,12 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
buf = NULL;
switch (opt) {
case 't':
- oi.type_name = &sb;
+ oi.typep = &type;
if (oid_object_info_extended(the_repository, &oid, &oi, flags) < 0)
die("git cat-file: could not get object info");
- if (sb.len) {
- printf("%s\n", sb.buf);
- strbuf_release(&sb);
- ret = 0;
- goto cleanup;
- }
- break;
+ printf("%s\n", type_name(type));
+ ret = 0;
+ goto cleanup;
case 's':
oi.sizep = &size;
@@ -169,7 +160,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
goto cleanup;
case 'e':
- ret = !repo_has_object_file(the_repository, &oid);
+ ret = !has_object(the_repository, &oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR);
goto cleanup;
case 'w':
@@ -1037,8 +1029,7 @@ int cmd_cat_file(int argc,
const char * const builtin_catfile_usage[] = {
N_("git cat-file <type> <object>"),
- N_("git cat-file (-e | -p) <object>"),
- N_("git cat-file (-t | -s) [--allow-unknown-type] <object>"),
+ N_("git cat-file (-e | -p | -t | -s) <object>"),
N_("git cat-file (--textconv | --filters)\n"
" [<rev>:<path|tree-ish> | --path=<path|tree-ish> <rev>]"),
N_("git cat-file (--batch | --batch-check | --batch-command) [--batch-all-objects]\n"
@@ -1056,8 +1047,8 @@ int cmd_cat_file(int argc,
OPT_GROUP(N_("Emit [broken] object attributes")),
OPT_CMDMODE('t', NULL, &opt, N_("show object type (one of 'blob', 'tree', 'commit', 'tag', ...)"), 't'),
OPT_CMDMODE('s', NULL, &opt, N_("show object size"), 's'),
- OPT_BOOL(0, "allow-unknown-type", &unknown_type,
- N_("allow -s and -t to work with broken/corrupt objects")),
+ OPT_HIDDEN_BOOL(0, "allow-unknown-type", &unknown_type,
+ N_("historical option -- no-op")),
OPT_BOOL(0, "use-mailmap", &use_mailmap, N_("use mail map file")),
OPT_ALIAS(0, "mailmap", "use-mailmap"),
/* Batch mode */
@@ -1208,10 +1199,7 @@ int cmd_cat_file(int argc,
obj_name = argv[1];
}
- if (unknown_type && opt != 't' && opt != 's')
- die("git cat-file --allow-unknown-type: use with -s or -t");
-
- ret = cat_one_file(opt, exp_type, obj_name, unknown_type);
+ ret = cat_one_file(opt, exp_type, obj_name);
out:
list_objects_filter_release(&batch.objects_filter);
diff --git a/builtin/clone.c b/builtin/clone.c
index b6f378c438..91b9cd0d16 100644
--- a/builtin/clone.c
+++ b/builtin/clone.c
@@ -504,9 +504,7 @@ static void write_followtags(const struct ref *refs, const char *msg)
continue;
if (ends_with(ref->name, "^{}"))
continue;
- if (!repo_has_object_file_with_flags(the_repository, &ref->old_oid,
- OBJECT_INFO_QUICK |
- OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (!has_object(the_repository, &ref->old_oid, 0))
continue;
refs_update_ref(get_main_ref_store(the_repository), msg,
ref->name, &ref->old_oid, NULL, 0,
diff --git a/builtin/count-objects.c b/builtin/count-objects.c
index 0bb5360b2f..a88c0c9c09 100644
--- a/builtin/count-objects.c
+++ b/builtin/count-objects.c
@@ -12,7 +12,7 @@
#include "parse-options.h"
#include "quote.h"
#include "packfile.h"
-#include "object-store.h"
+#include "object-file.h"
static unsigned long garbage;
static off_t size_garbage;
diff --git a/builtin/fast-import.c b/builtin/fast-import.c
index c1e198f4e3..b2839c5f43 100644
--- a/builtin/fast-import.c
+++ b/builtin/fast-import.c
@@ -811,7 +811,8 @@ static char *keep_pack(const char *curr_index_name)
int keep_fd;
odb_pack_name(pack_data->repo, &name, pack_data->hash, "keep");
- keep_fd = odb_pack_keep(name.buf);
+ keep_fd = safe_create_file_with_leading_directories(pack_data->repo,
+ name.buf);
if (keep_fd < 0)
die_errno("cannot create keep file");
write_or_die(keep_fd, keep_msg, strlen(keep_msg));
diff --git a/builtin/fetch.c b/builtin/fetch.c
index 5279997c96..cda6eaf1fd 100644
--- a/builtin/fetch.c
+++ b/builtin/fetch.c
@@ -337,7 +337,6 @@ static void find_non_local_tags(const struct ref *refs,
struct string_list_item *remote_ref_item;
const struct ref *ref;
struct refname_hash_entry *item = NULL;
- const int quick_flags = OBJECT_INFO_QUICK | OBJECT_INFO_SKIP_FETCH_OBJECT;
refname_hash_init(&existing_refs);
refname_hash_init(&remote_refs);
@@ -367,9 +366,9 @@ static void find_non_local_tags(const struct ref *refs,
*/
if (ends_with(ref->name, "^{}")) {
if (item &&
- !repo_has_object_file_with_flags(the_repository, &ref->old_oid, quick_flags) &&
+ !has_object(the_repository, &ref->old_oid, 0) &&
!oidset_contains(&fetch_oids, &ref->old_oid) &&
- !repo_has_object_file_with_flags(the_repository, &item->oid, quick_flags) &&
+ !has_object(the_repository, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
item = NULL;
@@ -383,7 +382,7 @@ static void find_non_local_tags(const struct ref *refs,
* fetch.
*/
if (item &&
- !repo_has_object_file_with_flags(the_repository, &item->oid, quick_flags) &&
+ !has_object(the_repository, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
@@ -404,7 +403,7 @@ static void find_non_local_tags(const struct ref *refs,
* checked to see if it needs fetching.
*/
if (item &&
- !repo_has_object_file_with_flags(the_repository, &item->oid, quick_flags) &&
+ !has_object(the_repository, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
@@ -911,7 +910,8 @@ static int update_local_ref(struct ref *ref,
struct commit *current = NULL, *updated;
int fast_forward = 0;
- if (!repo_has_object_file(the_repository, &ref->new_oid))
+ if (!has_object(the_repository, &ref->new_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
die(_("object %s not found"), oid_to_hex(&ref->new_oid));
if (oideq(&ref->old_oid, &ref->new_oid)) {
@@ -1330,8 +1330,7 @@ static int check_exist_and_connected(struct ref *ref_map)
* we need all direct targets to exist.
*/
for (r = rm; r; r = r->next) {
- if (!repo_has_object_file_with_flags(the_repository, &r->old_oid,
- OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (!has_object(the_repository, &r->old_oid, HAS_OBJECT_RECHECK_PACKED))
return -1;
}
diff --git a/builtin/fsck.c b/builtin/fsck.c
index 6cac28356c..e7d96a9c8e 100644
--- a/builtin/fsck.c
+++ b/builtin/fsck.c
@@ -614,12 +614,11 @@ static void get_default_heads(void)
struct for_each_loose_cb
{
struct progress *progress;
- struct strbuf obj_type;
};
-static int fsck_loose(const struct object_id *oid, const char *path, void *data)
+static int fsck_loose(const struct object_id *oid, const char *path,
+ void *data UNUSED)
{
- struct for_each_loose_cb *cb_data = data;
struct object *obj;
enum object_type type = OBJ_NONE;
unsigned long size;
@@ -629,8 +628,6 @@ static int fsck_loose(const struct object_id *oid, const char *path, void *data)
struct object_id real_oid = *null_oid(the_hash_algo);
int err = 0;
- strbuf_reset(&cb_data->obj_type);
- oi.type_name = &cb_data->obj_type;
oi.sizep = &size;
oi.typep = &type;
@@ -642,10 +639,6 @@ static int fsck_loose(const struct object_id *oid, const char *path, void *data)
err = error(_("%s: object corrupt or missing: %s"),
oid_to_hex(oid), path);
}
- if (type != OBJ_NONE && type < 0)
- err = error(_("%s: object is of unknown type '%s': %s"),
- oid_to_hex(&real_oid), cb_data->obj_type.buf,
- path);
if (err < 0) {
errors_found |= ERROR_OBJECT;
free(contents);
@@ -697,7 +690,6 @@ static void fsck_object_dir(const char *path)
{
struct progress *progress = NULL;
struct for_each_loose_cb cb_data = {
- .obj_type = STRBUF_INIT,
.progress = progress,
};
@@ -712,7 +704,6 @@ static void fsck_object_dir(const char *path)
&cb_data);
display_progress(progress, 256);
stop_progress(&progress);
- strbuf_release(&cb_data.obj_type);
}
static int fsck_head_link(const char *head_ref_name,
diff --git a/builtin/gc.c b/builtin/gc.c
index e690453d4f..e33ba946e4 100644
--- a/builtin/gc.c
+++ b/builtin/gc.c
@@ -16,6 +16,7 @@
#include "builtin.h"
#include "abspath.h"
#include "date.h"
+#include "dir.h"
#include "environment.h"
#include "hex.h"
#include "config.h"
@@ -28,11 +29,12 @@
#include "commit.h"
#include "commit-graph.h"
#include "packfile.h"
-#include "object-store.h"
+#include "object-file.h"
#include "pack.h"
#include "pack-objects.h"
#include "path.h"
#include "reflog.h"
+#include "rerere.h"
#include "blob.h"
#include "tree.h"
#include "promisor-remote.h"
@@ -43,6 +45,7 @@
#include "hook.h"
#include "setup.h"
#include "trace2.h"
+#include "worktree.h"
#define FAILED_RUN "failed to run %s"
@@ -52,15 +55,9 @@ static const char * const builtin_gc_usage[] = {
};
static timestamp_t gc_log_expire_time;
-
static struct strvec repack = STRVEC_INIT;
-static struct strvec prune = STRVEC_INIT;
-static struct strvec prune_worktrees = STRVEC_INIT;
-static struct strvec rerere = STRVEC_INIT;
-
static struct tempfile *pidfile;
static struct lock_file log_lock;
-
static struct string_list pack_garbage = STRING_LIST_INIT_DUP;
static void clean_pack_garbage(void)
@@ -339,6 +336,94 @@ static int maintenance_task_reflog_expire(struct maintenance_run_opts *opts UNUS
return run_command(&cmd);
}
+static int maintenance_task_worktree_prune(struct maintenance_run_opts *opts UNUSED,
+ struct gc_config *cfg)
+{
+ struct child_process prune_worktrees_cmd = CHILD_PROCESS_INIT;
+
+ prune_worktrees_cmd.git_cmd = 1;
+ strvec_pushl(&prune_worktrees_cmd.args, "worktree", "prune", "--expire", NULL);
+ strvec_push(&prune_worktrees_cmd.args, cfg->prune_worktrees_expire);
+
+ return run_command(&prune_worktrees_cmd);
+}
+
+static int worktree_prune_condition(struct gc_config *cfg)
+{
+ struct strbuf buf = STRBUF_INIT;
+ int should_prune = 0, limit = 1;
+ timestamp_t expiry_date;
+ struct dirent *d;
+ DIR *dir = NULL;
+
+ git_config_get_int("maintenance.worktree-prune.auto", &limit);
+ if (limit <= 0) {
+ should_prune = limit < 0;
+ goto out;
+ }
+
+ if (parse_expiry_date(cfg->prune_worktrees_expire, &expiry_date))
+ goto out;
+
+ dir = opendir(repo_git_path_replace(the_repository, &buf, "worktrees"));
+ if (!dir)
+ goto out;
+
+ while (limit && (d = readdir_skip_dot_and_dotdot(dir))) {
+ char *wtpath;
+ strbuf_reset(&buf);
+ if (should_prune_worktree(d->d_name, &buf, &wtpath, expiry_date))
+ limit--;
+ free(wtpath);
+ }
+
+ should_prune = !limit;
+
+out:
+ if (dir)
+ closedir(dir);
+ strbuf_release(&buf);
+ return should_prune;
+}
+
+static int maintenance_task_rerere_gc(struct maintenance_run_opts *opts UNUSED,
+ struct gc_config *cfg UNUSED)
+{
+ struct child_process rerere_cmd = CHILD_PROCESS_INIT;
+ rerere_cmd.git_cmd = 1;
+ strvec_pushl(&rerere_cmd.args, "rerere", "gc", NULL);
+ return run_command(&rerere_cmd);
+}
+
+static int rerere_gc_condition(struct gc_config *cfg UNUSED)
+{
+ struct strbuf path = STRBUF_INIT;
+ int should_gc = 0, limit = 1;
+ DIR *dir = NULL;
+
+ git_config_get_int("maintenance.rerere-gc.auto", &limit);
+ if (limit <= 0) {
+ should_gc = limit < 0;
+ goto out;
+ }
+
+ /*
+ * We skip garbage collection in case we either have no "rr-cache"
+ * directory or when it doesn't contain at least one entry.
+ */
+ repo_git_path_replace(the_repository, &path, "rr-cache");
+ dir = opendir(path.buf);
+ if (!dir)
+ goto out;
+ should_gc = !!readdir_skip_dot_and_dotdot(dir);
+
+out:
+ strbuf_release(&path);
+ if (dir)
+ closedir(dir);
+ return should_gc;
+}
+
static int too_many_loose_objects(struct gc_config *cfg)
{
/*
@@ -728,9 +813,9 @@ static void gc_before_repack(struct maintenance_run_opts *opts,
}
int cmd_gc(int argc,
-const char **argv,
-const char *prefix,
-struct repository *repo UNUSED)
+ const char **argv,
+ const char *prefix,
+ struct repository *repo UNUSED)
{
int aggressive = 0;
int quiet = 0;
@@ -740,7 +825,6 @@ struct repository *repo UNUSED)
int daemonized = 0;
int keep_largest_pack = -1;
timestamp_t dummy;
- struct child_process rerere_cmd = CHILD_PROCESS_INIT;
struct maintenance_run_opts opts = MAINTENANCE_RUN_OPTS_INIT;
struct gc_config cfg = GC_CONFIG_INIT;
const char *prune_expire_sentinel = "sentinel";
@@ -779,9 +863,6 @@ struct repository *repo UNUSED)
builtin_gc_usage, builtin_gc_options);
strvec_pushl(&repack, "repack", "-d", "-l", NULL);
- strvec_pushl(&prune, "prune", "--expire", NULL);
- strvec_pushl(&prune_worktrees, "worktree", "prune", "--expire", NULL);
- strvec_pushl(&rerere, "rerere", "gc", NULL);
gc_config(&cfg);
@@ -907,34 +988,27 @@ struct repository *repo UNUSED)
if (cfg.prune_expire) {
struct child_process prune_cmd = CHILD_PROCESS_INIT;
+ strvec_pushl(&prune_cmd.args, "prune", "--expire", NULL);
/* run `git prune` even if using cruft packs */
- strvec_push(&prune, cfg.prune_expire);
+ strvec_push(&prune_cmd.args, cfg.prune_expire);
if (quiet)
- strvec_push(&prune, "--no-progress");
+ strvec_push(&prune_cmd.args, "--no-progress");
if (repo_has_promisor_remote(the_repository))
- strvec_push(&prune,
+ strvec_push(&prune_cmd.args,
"--exclude-promisor-objects");
prune_cmd.git_cmd = 1;
- strvec_pushv(&prune_cmd.args, prune.v);
+
if (run_command(&prune_cmd))
- die(FAILED_RUN, prune.v[0]);
+ die(FAILED_RUN, prune_cmd.args.v[0]);
}
}
- if (cfg.prune_worktrees_expire) {
- struct child_process prune_worktrees_cmd = CHILD_PROCESS_INIT;
-
- strvec_push(&prune_worktrees, cfg.prune_worktrees_expire);
- prune_worktrees_cmd.git_cmd = 1;
- strvec_pushv(&prune_worktrees_cmd.args, prune_worktrees.v);
- if (run_command(&prune_worktrees_cmd))
- die(FAILED_RUN, prune_worktrees.v[0]);
- }
+ if (cfg.prune_worktrees_expire &&
+ maintenance_task_worktree_prune(&opts, &cfg))
+ die(FAILED_RUN, "worktree");
- rerere_cmd.git_cmd = 1;
- strvec_pushv(&rerere_cmd.args, rerere.v);
- if (run_command(&rerere_cmd))
- die(FAILED_RUN, rerere.v[0]);
+ if (maintenance_task_rerere_gc(&opts, &cfg))
+ die(FAILED_RUN, "rerere");
report_garbage = report_pack_garbage;
reprepare_packed_git(the_repository);
@@ -1467,6 +1541,8 @@ enum maintenance_task_label {
TASK_COMMIT_GRAPH,
TASK_PACK_REFS,
TASK_REFLOG_EXPIRE,
+ TASK_WORKTREE_PRUNE,
+ TASK_RERERE_GC,
/* Leave as final value */
TASK__COUNT
@@ -1508,6 +1584,16 @@ static struct maintenance_task tasks[] = {
maintenance_task_reflog_expire,
reflog_expire_condition,
},
+ [TASK_WORKTREE_PRUNE] = {
+ "worktree-prune",
+ maintenance_task_worktree_prune,
+ worktree_prune_condition,
+ },
+ [TASK_RERERE_GC] = {
+ "rerere-gc",
+ maintenance_task_rerere_gc,
+ rerere_gc_condition,
+ },
};
static int compare_tasks_by_selection(const void *a_, const void *b_)
diff --git a/builtin/hash-object.c b/builtin/hash-object.c
index cd53fa3bde..6a99ec250d 100644
--- a/builtin/hash-object.c
+++ b/builtin/hash-object.c
@@ -19,49 +19,15 @@
#include "strbuf.h"
#include "write-or-die.h"
-enum {
- HASH_OBJECT_CHECK = (1 << 0),
- HASH_OBJECT_WRITE = (1 << 1),
-};
-
-/*
- * This is to create corrupt objects for debugging and as such it
- * needs to bypass the data conversion performed by, and the type
- * limitation imposed by, index_fd() and its callees.
- */
-static int hash_literally(struct object_id *oid, int fd, const char *type, unsigned flags)
+static void hash_fd(int fd, const char *type, const char *path, unsigned flags)
{
- struct strbuf buf = STRBUF_INIT;
- int ret;
-
- if (strbuf_read(&buf, fd, 4096) < 0)
- ret = -1;
- else
- ret = write_object_file_literally(buf.buf, buf.len, type, oid,
- (flags & HASH_OBJECT_WRITE) ? WRITE_OBJECT_FILE_PERSIST : 0);
- close(fd);
- strbuf_release(&buf);
- return ret;
-}
-
-static void hash_fd(int fd, const char *type, const char *path, unsigned flags,
- int literally)
-{
- unsigned int index_flags = 0;
struct stat st;
struct object_id oid;
- if (flags & HASH_OBJECT_WRITE)
- index_flags |= INDEX_WRITE_OBJECT;
- if (flags & HASH_OBJECT_CHECK)
- index_flags |= INDEX_FORMAT_CHECK;
-
if (fstat(fd, &st) < 0 ||
- (literally
- ? hash_literally(&oid, fd, type, flags)
- : index_fd(the_repository->index, &oid, fd, &st,
- type_from_string(type), path, index_flags)))
- die((flags & HASH_OBJECT_WRITE)
+ index_fd(the_repository->index, &oid, fd, &st,
+ type_from_string(type), path, flags))
+ die((flags & INDEX_WRITE_OBJECT)
? "Unable to add %s to database"
: "Unable to hash %s", path);
printf("%s\n", oid_to_hex(&oid));
@@ -69,15 +35,14 @@ static void hash_fd(int fd, const char *type, const char *path, unsigned flags,
}
static void hash_object(const char *path, const char *type, const char *vpath,
- unsigned flags, int literally)
+ unsigned flags)
{
int fd;
fd = xopen(path, O_RDONLY);
- hash_fd(fd, type, vpath, flags, literally);
+ hash_fd(fd, type, vpath, flags);
}
-static void hash_stdin_paths(const char *type, int no_filters, unsigned flags,
- int literally)
+static void hash_stdin_paths(const char *type, int no_filters, unsigned flags)
{
struct strbuf buf = STRBUF_INIT;
struct strbuf unquoted = STRBUF_INIT;
@@ -89,8 +54,7 @@ static void hash_stdin_paths(const char *type, int no_filters, unsigned flags,
die("line is badly quoted");
strbuf_swap(&buf, &unquoted);
}
- hash_object(buf.buf, type, no_filters ? NULL : buf.buf, flags,
- literally);
+ hash_object(buf.buf, type, no_filters ? NULL : buf.buf, flags);
}
strbuf_release(&buf);
strbuf_release(&unquoted);
@@ -111,19 +75,20 @@ int cmd_hash_object(int argc,
int hashstdin = 0;
int stdin_paths = 0;
int no_filters = 0;
- int literally = 0;
int nongit = 0;
- unsigned flags = HASH_OBJECT_CHECK;
+ unsigned flags = INDEX_FORMAT_CHECK;
const char *vpath = NULL;
char *vpath_free = NULL;
const struct option hash_object_options[] = {
OPT_STRING('t', NULL, &type, N_("type"), N_("object type")),
OPT_BIT('w', NULL, &flags, N_("write the object into the object database"),
- HASH_OBJECT_WRITE),
+ INDEX_WRITE_OBJECT),
OPT_COUNTUP( 0 , "stdin", &hashstdin, N_("read the object from stdin")),
OPT_BOOL( 0 , "stdin-paths", &stdin_paths, N_("read file names from stdin")),
OPT_BOOL( 0 , "no-filters", &no_filters, N_("store file as is without filters")),
- OPT_BOOL( 0, "literally", &literally, N_("just hash any random garbage to create corrupt objects for debugging Git")),
+ OPT_NEGBIT( 0, "literally", &flags,
+ N_("just hash any random garbage to create corrupt objects for debugging Git"),
+ INDEX_FORMAT_CHECK),
OPT_STRING( 0 , "path", &vpath, N_("file"), N_("process file as it were from this path")),
OPT_END()
};
@@ -133,7 +98,7 @@ int cmd_hash_object(int argc,
argc = parse_options(argc, argv, prefix, hash_object_options,
hash_object_usage, 0);
- if (flags & HASH_OBJECT_WRITE)
+ if (flags & INDEX_WRITE_OBJECT)
prefix = setup_git_directory();
else
prefix = setup_git_directory_gently(&nongit);
@@ -169,7 +134,7 @@ int cmd_hash_object(int argc,
}
if (hashstdin)
- hash_fd(0, type, vpath, flags, literally);
+ hash_fd(0, type, vpath, flags);
for (i = 0 ; i < argc; i++) {
const char *arg = argv[i];
@@ -178,12 +143,12 @@ int cmd_hash_object(int argc,
if (prefix)
arg = to_free = prefix_filename(prefix, arg);
hash_object(arg, type, no_filters ? NULL : vpath ? vpath : arg,
- flags, literally);
+ flags);
free(to_free);
}
if (stdin_paths)
- hash_stdin_paths(type, no_filters, flags, literally);
+ hash_stdin_paths(type, no_filters, flags);
free(vpath_free);
diff --git a/builtin/index-pack.c b/builtin/index-pack.c
index 60a8ee05db..bb7925bd29 100644
--- a/builtin/index-pack.c
+++ b/builtin/index-pack.c
@@ -892,9 +892,8 @@ static void sha1_object(const void *data, struct object_entry *obj_entry,
if (startup_info->have_repository) {
read_lock();
- collision_test_needed =
- repo_has_object_file_with_flags(the_repository, oid,
- OBJECT_INFO_QUICK);
+ collision_test_needed = has_object(the_repository, oid,
+ HAS_OBJECT_FETCH_PROMISOR);
read_unlock();
}
@@ -1109,8 +1108,8 @@ static void *threaded_second_pass(void *data)
set_thread_data(data);
for (;;) {
struct base_data *parent = NULL;
- struct object_entry *child_obj;
- struct base_data *child;
+ struct object_entry *child_obj = NULL;
+ struct base_data *child = NULL;
counter_lock();
display_progress(progress, nr_resolved_deltas);
@@ -1137,15 +1136,18 @@ static void *threaded_second_pass(void *data)
parent = list_first_entry(&work_head, struct base_data,
list);
- if (parent->ref_first <= parent->ref_last) {
+ while (parent->ref_first <= parent->ref_last) {
int offset = ref_deltas[parent->ref_first++].obj_no;
child_obj = objects + offset;
- if (child_obj->real_type != OBJ_REF_DELTA)
- die("REF_DELTA at offset %"PRIuMAX" already resolved (duplicate base %s?)",
- (uintmax_t) child_obj->idx.offset,
- oid_to_hex(&parent->obj->idx.oid));
+ if (child_obj->real_type != OBJ_REF_DELTA) {
+ child_obj = NULL;
+ continue;
+ }
child_obj->real_type = parent->obj->real_type;
- } else {
+ break;
+ }
+
+ if (!child_obj && parent->ofs_first <= parent->ofs_last) {
child_obj = objects +
ofs_deltas[parent->ofs_first++].obj_no;
assert(child_obj->real_type == OBJ_OFS_DELTA);
@@ -1178,29 +1180,32 @@ static void *threaded_second_pass(void *data)
}
work_unlock();
- if (parent) {
- child = resolve_delta(child_obj, parent);
- if (!child->children_remaining)
- FREE_AND_NULL(child->data);
- } else {
- child = make_base(child_obj, NULL);
- if (child->children_remaining) {
- /*
- * Since this child has its own delta children,
- * we will need this data in the future.
- * Inflate now so that future iterations will
- * have access to this object's data while
- * outside the work mutex.
- */
- child->data = get_data_from_pack(child_obj);
- child->size = child_obj->size;
+ if (child_obj) {
+ if (parent) {
+ child = resolve_delta(child_obj, parent);
+ if (!child->children_remaining)
+ FREE_AND_NULL(child->data);
+ } else {
+ child = make_base(child_obj, NULL);
+ if (child->children_remaining) {
+ /*
+ * Since this child has its own delta children,
+ * we will need this data in the future.
+ * Inflate now so that future iterations will
+ * have access to this object's data while
+ * outside the work mutex.
+ */
+ child->data = get_data_from_pack(child_obj);
+ child->size = child_obj->size;
+ }
}
}
work_lock();
if (parent)
parent->retain_data--;
- if (child->data) {
+
+ if (child && child->data) {
/*
* This child has its own children, so add it to
* work_head.
@@ -1209,7 +1214,7 @@ static void *threaded_second_pass(void *data)
base_cache_used += child->size;
prune_base_data(NULL);
free_base_data(child);
- } else {
+ } else if (child) {
/*
* This child does not have its own children. It may be
* the last descendant of its ancestors; free those
@@ -1565,7 +1570,7 @@ static void write_special_file(const char *suffix, const char *msg,
else
filename = odb_pack_name(the_repository, &name_buf, hash, suffix);
- fd = odb_pack_keep(filename);
+ fd = safe_create_file_with_leading_directories(the_repository, filename);
if (fd < 0) {
if (errno != EEXIST)
die_errno(_("cannot write %s file '%s'"),
diff --git a/builtin/receive-pack.c b/builtin/receive-pack.c
index be314879e8..c92e57ba18 100644
--- a/builtin/receive-pack.c
+++ b/builtin/receive-pack.c
@@ -1506,7 +1506,9 @@ static const char *update(struct command *cmd, struct shallow_info *si)
}
}
- if (!is_null_oid(new_oid) && !repo_has_object_file(the_repository, new_oid)) {
+ if (!is_null_oid(new_oid) &&
+ !has_object(the_repository, new_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
error("unpack should have generated %s, "
"but I can't find it!", oid_to_hex(new_oid));
ret = "bad pack";
diff --git a/builtin/remote.c b/builtin/remote.c
index b4baa34e66..0d6755bcb7 100644
--- a/builtin/remote.c
+++ b/builtin/remote.c
@@ -454,7 +454,8 @@ static int get_push_ref_states(const struct ref *remote_refs,
info->status = PUSH_STATUS_UPTODATE;
else if (is_null_oid(&ref->old_oid))
info->status = PUSH_STATUS_CREATE;
- else if (repo_has_object_file(the_repository, &ref->old_oid) &&
+ else if (has_object(the_repository, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
ref_newer(&ref->new_oid, &ref->old_oid))
info->status = PUSH_STATUS_FASTFORWARD;
else
diff --git a/builtin/replay.c b/builtin/replay.c
index 032c172b65..225cef0880 100644
--- a/builtin/replay.c
+++ b/builtin/replay.c
@@ -20,21 +20,22 @@
#include <oidset.h>
#include <tree.h>
-static const char *short_commit_name(struct commit *commit)
+static const char *short_commit_name(struct repository *repo,
+ struct commit *commit)
{
- return repo_find_unique_abbrev(the_repository, &commit->object.oid,
+ return repo_find_unique_abbrev(repo, &commit->object.oid,
DEFAULT_ABBREV);
}
-static struct commit *peel_committish(const char *name)
+static struct commit *peel_committish(struct repository *repo, const char *name)
{
struct object *obj;
struct object_id oid;
- if (repo_get_oid(the_repository, name, &oid))
+ if (repo_get_oid(repo, name, &oid))
return NULL;
- obj = parse_object(the_repository, &oid);
- return (struct commit *)repo_peel_to_type(the_repository, name, 0, obj,
+ obj = parse_object(repo, &oid);
+ return (struct commit *)repo_peel_to_type(repo, name, 0, obj,
OBJ_COMMIT);
}
@@ -50,7 +51,8 @@ static char *get_author(const char *message)
return NULL;
}
-static struct commit *create_commit(struct tree *tree,
+static struct commit *create_commit(struct repository *repo,
+ struct tree *tree,
struct commit *based_on,
struct commit *parent)
{
@@ -62,7 +64,7 @@ static struct commit *create_commit(struct tree *tree,
struct commit_extra_header *extra = NULL;
struct strbuf msg = STRBUF_INIT;
const char *out_enc = get_commit_output_encoding();
- const char *message = repo_logmsg_reencode(the_repository, based_on,
+ const char *message = repo_logmsg_reencode(repo, based_on,
NULL, out_enc);
const char *orig_message = NULL;
const char *exclude_gpgsig[] = { "gpgsig", NULL };
@@ -79,7 +81,7 @@ static struct commit *create_commit(struct tree *tree,
goto out;
}
- obj = parse_object(the_repository, &ret);
+ obj = parse_object(repo, &ret);
out:
free_commit_extra_headers(extra);
@@ -97,7 +99,8 @@ struct ref_info {
int negative_refexprs;
};
-static void get_ref_information(struct rev_cmdline_info *cmd_info,
+static void get_ref_information(struct repository *repo,
+ struct rev_cmdline_info *cmd_info,
struct ref_info *ref_info)
{
int i;
@@ -132,14 +135,14 @@ static void get_ref_information(struct rev_cmdline_info *cmd_info,
if (*refexpr == '^')
refexpr++;
- if (repo_dwim_ref(the_repository, refexpr, strlen(refexpr), &oid, &fullname, 0) != 1)
+ if (repo_dwim_ref(repo, refexpr, strlen(refexpr), &oid, &fullname, 0) != 1)
can_uniquely_dwim = 0;
if (e->flags & BOTTOM) {
if (can_uniquely_dwim)
strset_add(&ref_info->negative_refs, fullname);
if (!ref_info->negative_refexprs)
- ref_info->onto = lookup_commit_reference_gently(the_repository,
+ ref_info->onto = lookup_commit_reference_gently(repo,
&e->item->oid, 1);
ref_info->negative_refexprs++;
} else {
@@ -152,7 +155,8 @@ static void get_ref_information(struct rev_cmdline_info *cmd_info,
}
}
-static void determine_replay_mode(struct rev_cmdline_info *cmd_info,
+static void determine_replay_mode(struct repository *repo,
+ struct rev_cmdline_info *cmd_info,
const char *onto_name,
char **advance_name,
struct commit **onto,
@@ -160,14 +164,14 @@ static void determine_replay_mode(struct rev_cmdline_info *cmd_info,
{
struct ref_info rinfo;
- get_ref_information(cmd_info, &rinfo);
+ get_ref_information(repo, cmd_info, &rinfo);
if (!rinfo.positive_refexprs)
die(_("need some commits to replay"));
die_for_incompatible_opt2(!!onto_name, "--onto",
!!*advance_name, "--advance");
if (onto_name) {
- *onto = peel_committish(onto_name);
+ *onto = peel_committish(repo, onto_name);
if (rinfo.positive_refexprs <
strset_get_size(&rinfo.positive_refs))
die(_("all positive revisions given must be references"));
@@ -175,8 +179,8 @@ static void determine_replay_mode(struct rev_cmdline_info *cmd_info,
struct object_id oid;
char *fullname = NULL;
- *onto = peel_committish(*advance_name);
- if (repo_dwim_ref(the_repository, *advance_name, strlen(*advance_name),
+ *onto = peel_committish(repo, *advance_name);
+ if (repo_dwim_ref(repo, *advance_name, strlen(*advance_name),
&oid, &fullname, 0) == 1) {
free(*advance_name);
*advance_name = fullname;
@@ -245,7 +249,8 @@ static struct commit *mapped_commit(kh_oid_map_t *replayed_commits,
return kh_value(replayed_commits, pos);
}
-static struct commit *pick_regular_commit(struct commit *pickme,
+static struct commit *pick_regular_commit(struct repository *repo,
+ struct commit *pickme,
kh_oid_map_t *replayed_commits,
struct commit *onto,
struct merge_options *merge_opt,
@@ -257,12 +262,12 @@ static struct commit *pick_regular_commit(struct commit *pickme,
base = pickme->parents->item;
replayed_base = mapped_commit(replayed_commits, base, onto);
- result->tree = repo_get_commit_tree(the_repository, replayed_base);
- pickme_tree = repo_get_commit_tree(the_repository, pickme);
- base_tree = repo_get_commit_tree(the_repository, base);
+ result->tree = repo_get_commit_tree(repo, replayed_base);
+ pickme_tree = repo_get_commit_tree(repo, pickme);
+ base_tree = repo_get_commit_tree(repo, base);
- merge_opt->branch1 = short_commit_name(replayed_base);
- merge_opt->branch2 = short_commit_name(pickme);
+ merge_opt->branch1 = short_commit_name(repo, replayed_base);
+ merge_opt->branch2 = short_commit_name(repo, pickme);
merge_opt->ancestor = xstrfmt("parent of %s", merge_opt->branch2);
merge_incore_nonrecursive(merge_opt,
@@ -275,13 +280,13 @@ static struct commit *pick_regular_commit(struct commit *pickme,
merge_opt->ancestor = NULL;
if (!result->clean)
return NULL;
- return create_commit(result->tree, pickme, replayed_base);
+ return create_commit(repo, result->tree, pickme, replayed_base);
}
int cmd_replay(int argc,
const char **argv,
const char *prefix,
- struct repository *repo UNUSED)
+ struct repository *repo)
{
const char *advance_name_opt = NULL;
char *advance_name = NULL;
@@ -329,7 +334,7 @@ int cmd_replay(int argc,
"--advance", "--contained");
advance_name = xstrdup_or_null(advance_name_opt);
- repo_init_revisions(the_repository, &revs, prefix);
+ repo_init_revisions(repo, &revs, prefix);
/*
* Set desired values for rev walking options here. If they
@@ -380,7 +385,7 @@ int cmd_replay(int argc,
revs.simplify_history = 0;
}
- determine_replay_mode(&revs.cmdline, onto_name, &advance_name,
+ determine_replay_mode(repo, &revs.cmdline, onto_name, &advance_name,
&onto, &update_refs);
if (!onto) /* FIXME: Should handle replaying down to root commit */
@@ -391,7 +396,7 @@ int cmd_replay(int argc,
goto cleanup;
}
- init_basic_merge_options(&merge_opt, the_repository);
+ init_basic_merge_options(&merge_opt, repo);
memset(&result, 0, sizeof(result));
merge_opt.show_rename_progress = 0;
last_commit = onto;
@@ -406,8 +411,8 @@ int cmd_replay(int argc,
if (commit->parents->next)
die(_("replaying merge commits is not supported yet!"));
- last_commit = pick_regular_commit(commit, replayed_commits, onto,
- &merge_opt, &result);
+ last_commit = pick_regular_commit(repo, commit, replayed_commits,
+ onto, &merge_opt, &result);
if (!last_commit)
break;
diff --git a/builtin/rev-list.c b/builtin/rev-list.c
index c4cd4ed5c8..0984b607bf 100644
--- a/builtin/rev-list.c
+++ b/builtin/rev-list.c
@@ -924,7 +924,7 @@ int cmd_rev_list(int argc,
free((void *)entry->path);
}
- oidmap_free(&missing_objects, true);
+ oidmap_clear(&missing_objects, true);
}
stop_progress(&progress);
diff --git a/builtin/show-ref.c b/builtin/show-ref.c
index f81209f23c..623a52a45f 100644
--- a/builtin/show-ref.c
+++ b/builtin/show-ref.c
@@ -35,7 +35,8 @@ static void show_one(const struct show_one_options *opts,
const char *hex;
struct object_id peeled;
- if (!repo_has_object_file(the_repository, oid))
+ if (!has_object(the_repository, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
die("git show-ref: bad ref %s (%s)", refname,
oid_to_hex(oid));
diff --git a/builtin/unpack-objects.c b/builtin/unpack-objects.c
index 661be789f1..e905d5f4e1 100644
--- a/builtin/unpack-objects.c
+++ b/builtin/unpack-objects.c
@@ -449,7 +449,8 @@ static void unpack_delta_entry(enum object_type type, unsigned long delta_size,
delta_data = get_data(delta_size);
if (!delta_data)
return;
- if (repo_has_object_file(the_repository, &base_oid))
+ if (has_object(the_repository, &base_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
; /* Ok we have this one */
else if (resolve_against_held(nr, &base_oid,
delta_data, delta_size))
diff --git a/bulk-checkin.c b/bulk-checkin.c
index c31c31b18d..678e2ecc2c 100644
--- a/bulk-checkin.c
+++ b/bulk-checkin.c
@@ -130,7 +130,8 @@ static void flush_batch_fsync(void)
static int already_written(struct bulk_checkin_packfile *state, struct object_id *oid)
{
/* The object may already exist in the repository */
- if (repo_has_object_file(the_repository, oid))
+ if (has_object(the_repository, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 1;
/* Might want to keep the list sorted */
diff --git a/bundle-uri.c b/bundle-uri.c
index 96d2ba726d..dc120664d1 100644
--- a/bundle-uri.c
+++ b/bundle-uri.c
@@ -403,7 +403,7 @@ static int unbundle_from_file(struct repository *r, const char *file)
const char *branch_name;
int has_old;
- if (!skip_prefix(refname->string, "refs/heads/", &branch_name))
+ if (!skip_prefix(refname->string, "refs/", &branch_name))
continue;
strbuf_setlen(&bundle_ref, bundle_prefix_len);
diff --git a/cache-tree.c b/cache-tree.c
index c0e1e9ee1d..fa3858e282 100644
--- a/cache-tree.c
+++ b/cache-tree.c
@@ -238,7 +238,9 @@ int cache_tree_fully_valid(struct cache_tree *it)
int i;
if (!it)
return 0;
- if (it->entry_count < 0 || !repo_has_object_file(the_repository, &it->oid))
+ if (it->entry_count < 0 ||
+ !has_object(the_repository, &it->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0;
for (i = 0; i < it->subtree_nr; i++) {
if (!cache_tree_fully_valid(it->down[i]->cache_tree))
@@ -289,7 +291,9 @@ static int update_one(struct cache_tree *it,
}
}
- if (0 <= it->entry_count && repo_has_object_file(the_repository, &it->oid))
+ if (0 <= it->entry_count &&
+ has_object(the_repository, &it->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return it->entry_count;
/*
@@ -395,7 +399,8 @@ static int update_one(struct cache_tree *it,
ce_missing_ok = mode == S_IFGITLINK || missing_ok ||
!must_check_existence(ce);
if (is_null_oid(oid) ||
- (!ce_missing_ok && !repo_has_object_file(the_repository, oid))) {
+ (!ce_missing_ok && !has_object(the_repository, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))) {
strbuf_release(&buffer);
if (expected_missing)
return -1;
@@ -443,7 +448,7 @@ static int update_one(struct cache_tree *it,
struct object_id oid;
hash_object_file(the_hash_algo, buffer.buf, buffer.len,
OBJ_TREE, &oid);
- if (repo_has_object_file_with_flags(the_repository, &oid, OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (has_object(the_repository, &oid, HAS_OBJECT_RECHECK_PACKED))
oidcpy(&it->oid, &oid);
else
to_invalidate = 1;
diff --git a/ci/install-dependencies.sh b/ci/install-dependencies.sh
index da19dada2c..d061a47293 100755
--- a/ci/install-dependencies.sh
+++ b/ci/install-dependencies.sh
@@ -9,7 +9,7 @@ begin_group "Install dependencies"
P4WHENCE=https://cdist2.perforce.com/perforce/r23.2
LFSWHENCE=https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION
-JGITWHENCE=https://repo.eclipse.org/content/groups/releases//org/eclipse/jgit/org.eclipse.jgit.pgm/6.8.0.202311291450-r/org.eclipse.jgit.pgm-6.8.0.202311291450-r.sh
+JGITWHENCE=https://repo1.maven.org/maven2/org/eclipse/jgit/org.eclipse.jgit.pgm/6.8.0.202311291450-r/org.eclipse.jgit.pgm-6.8.0.202311291450-r.sh
# Make sudo a no-op and execute the command directly when running as root.
# While using sudo would be fine on most platforms when we are root already,
@@ -71,7 +71,6 @@ ubuntu-*|i386/ubuntu-*|debian-*)
chmod a+x "$CUSTOM_PATH/p4d" "$CUSTOM_PATH/p4" || {
rm -f "$CUSTOM_PATH/p4"
rm -f "$CUSTOM_PATH/p4d"
- echo >&2 "P4 download (optional) failed"
}
wget --quiet \
@@ -79,16 +78,12 @@ ubuntu-*|i386/ubuntu-*|debian-*)
tar -xzf "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz" \
-C "$CUSTOM_PATH" --strip-components=1 \
"git-lfs-$LINUX_GIT_LFS_VERSION/git-lfs" &&
- rm "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz" || {
- rm -f "$CUSTOM_PATH/git-lfs"
- echo >&2 "LFS download (optional) failed"
- }
+ rm "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz" ||
+ rm -f "$CUSTOM_PATH/git-lfs"
wget --quiet "$JGITWHENCE" --output-document="$CUSTOM_PATH/jgit" &&
- chmod a+x "$CUSTOM_PATH/jgit" || {
- rm -f "$CUSTOM_PATH/jgit"
- echo >&2 "JGit download (optional) failed"
- }
+ chmod a+x "$CUSTOM_PATH/jgit" ||
+ rm -f "$CUSTOM_PATH/jgit"
;;
esac
;;
@@ -151,7 +146,7 @@ then
echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)"
p4 -V
else
- echo >&2 "WARNING: perforce wasn't installed, see above for clues why"
+ echo >&2 "::warning:: perforce wasn't installed, see above for clues why"
fi
if type git-lfs >/dev/null 2>&1
@@ -159,7 +154,7 @@ then
echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)"
git-lfs version
else
- echo >&2 "WARNING: git-lfs wasn't installed, see above for clues why"
+ echo >&2 "::warning:: git-lfs wasn't installed, see above for clues why"
fi
if type jgit >/dev/null 2>&1
@@ -167,7 +162,7 @@ then
echo "$(tput setaf 6)JGit Version$(tput sgr0)"
jgit version
else
- echo >&2 "WARNING: JGit wasn't installed, see above for clues why"
+ echo >&2 "::warning:: JGit wasn't installed, see above for clues why"
fi
end_group "Install dependencies"
diff --git a/ci/run-build-and-tests.sh b/ci/run-build-and-tests.sh
index f99b7db2ee..01823fd0f1 100755
--- a/ci/run-build-and-tests.sh
+++ b/ci/run-build-and-tests.sh
@@ -5,11 +5,6 @@
. ${0%/*}/lib.sh
-case "$CI_OS_NAME" in
-windows*) cmd //c mklink //j t\\.prove "$(cygpath -aw "$cache_dir/.prove")";;
-*) ln -s "$cache_dir/.prove" t/.prove;;
-esac
-
run_tests=t
case "$jobname" in
@@ -56,6 +51,7 @@ case "$jobname" in
--warnlevel 2 --werror \
--wrap-mode nofallback \
-Dfuzzers=true \
+ -Dtest_output_directory="${TEST_OUTPUT_DIRECTORY:-$(pwd)/t}" \
$MESONFLAGS
group "Build" meson compile -C build --
if test -n "$run_tests"
diff --git a/ci/run-test-slice.sh b/ci/run-test-slice.sh
index e167e646f7..0444c79c02 100755
--- a/ci/run-test-slice.sh
+++ b/ci/run-test-slice.sh
@@ -5,11 +5,6 @@
. ${0%/*}/lib.sh
-case "$CI_OS_NAME" in
-windows*) cmd //c mklink //j t\\.prove "$(cygpath -aw "$cache_dir/.prove")";;
-*) ln -s "$cache_dir/.prove" t/.prove;;
-esac
-
group "Run tests" make --quiet -C t T="$(cd t &&
./helper/test-tool path-utils slice-tests "$1" "$2" t[0-9]*.sh |
tr '\n' ' ')" ||
diff --git a/commit-graph.c b/commit-graph.c
index 6394752b0b..bb77d81861 100644
--- a/commit-graph.c
+++ b/commit-graph.c
@@ -222,7 +222,7 @@ static int commit_graph_compatible(struct repository *r)
if (replace_refs_enabled(r)) {
prepare_replace_object(r);
- if (hashmap_get_size(&r->objects->replace_map->map))
+ if (oidmap_get_size(&r->objects->replace_map))
return 0;
}
@@ -1929,6 +1929,8 @@ static int fill_oids_from_packs(struct write_commit_graph_context *ctx,
}
if (open_pack_index(p)) {
ret = error(_("error opening index for %s"), packname.buf);
+ close_pack(p);
+ free(p);
goto cleanup;
}
for_each_object_in_pack(p, add_packed_commits, ctx,
diff --git a/compiler-tricks/not-constant.c b/compiler-tricks/not-constant.c
index 1da3ffc2f5..9fb4f275b1 100644
--- a/compiler-tricks/not-constant.c
+++ b/compiler-tricks/not-constant.c
@@ -1,2 +1,2 @@
#include <git-compat-util.h>
-int false_but_the_compiler_does_not_know_it_;
+int false_but_the_compiler_does_not_know_it_ = 0;
diff --git a/config.mak.uname b/config.mak.uname
index df172d5871..7fc2c5bf8d 100644
--- a/config.mak.uname
+++ b/config.mak.uname
@@ -776,79 +776,3 @@ ifeq ($(uname_S),QNX)
NO_STRCASESTR = YesPlease
NO_STRLCPY = YesPlease
endif
-
-vcxproj:
- # Require clean work tree
- git update-index -q --refresh && \
- git diff-files --quiet && \
- git diff-index --cached --quiet HEAD --
-
- # Make .vcxproj files and add them
- perl contrib/buildsystems/generate -g Vcxproj
- git add -f git.sln {*,*/lib,t/helper/*}/*.vcxproj
-
- # Generate the LinkOrCopyBuiltins.targets and LinkOrCopyRemoteHttp.targets file
- (echo '<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">' && \
- echo ' <Target Name="CopyBuiltins_AfterBuild" AfterTargets="AfterBuild">' && \
- for name in $(BUILT_INS);\
- do \
- echo ' <Copy SourceFiles="$$(OutDir)\git.exe" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
- done && \
- echo ' </Target>' && \
- echo '</Project>') >git/LinkOrCopyBuiltins.targets
- (echo '<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">' && \
- echo ' <Target Name="CopyBuiltins_AfterBuild" AfterTargets="AfterBuild">' && \
- for name in $(REMOTE_CURL_ALIASES); \
- do \
- echo ' <Copy SourceFiles="$$(OutDir)\'"$(REMOTE_CURL_PRIMARY)"'" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
- done && \
- echo ' </Target>' && \
- echo '</Project>') >git-remote-http/LinkOrCopyRemoteHttp.targets
- git add -f git/LinkOrCopyBuiltins.targets git-remote-http/LinkOrCopyRemoteHttp.targets
-
- # Add generated headers
- $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(GENERATED_H)
- git add -f $(GENERATED_H)
-
- # Add scripts
- rm -f perl/perl.mak
- $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(SCRIPT_LIB) $(SCRIPTS)
- # Strip out the sane tool path, needed only for building
- sed -i '/^git_broken_path_fix ".*/d' git-sh-setup
- git add -f $(SCRIPT_LIB) $(SCRIPTS)
-
- # Add Perl module
- $(MAKE) $(LIB_PERL_GEN)
- git add -f perl/build
-
- # Add bin-wrappers, for testing
- rm -rf bin-wrappers/
- $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(test_bindir_programs)
- # Ensure that the GIT_EXEC_PATH is a Unix-y one, and that the absolute
- # path of the repository is not hard-coded (GIT_EXEC_PATH will be set
- # by test-lib.sh according to the current setup)
- sed -i -e 's/^\(GIT_EXEC_PATH\)=.*/test -n "$${\1##*:*}" ||\
- \1="$$(cygpath -u "$$\1")"/' \
- -e "s|'$$(pwd)|\"\$$GIT_EXEC_PATH\"'|g" bin-wrappers/*
- # Ensure that test-* helpers find the .dll files copied to top-level
- sed -i 's|^PATH=.*|&:"$$GIT_EXEC_PATH"|' bin-wrappers/test-*
- # We do not want to force hard-linking builtins
- sed -i 's|\(git\)-\([-a-z]*\)\.exe"|\1.exe" \2|g' \
- bin-wrappers/git-{receive-pack,upload-archive}
- git add -f $(test_bindir_programs)
-
- # Add templates
- $(MAKE) -C templates
- git add -f templates/boilerplates.made templates/blt/
-
- # Add the translated messages
- make MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(MOFILES)
- git add -f $(MOFILES)
-
- # Add build options
- $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 GIT-BUILD-OPTIONS
- git add -f GIT-BUILD-OPTIONS
-
- # Commit the whole shebang
- git commit -m "Generate Visual Studio solution" \
- -m "Auto-generated by \`$(MAKE)$(MAKEFLAGS) $@\`"
diff --git a/contrib/buildsystems/Generators.pm b/contrib/buildsystems/Generators.pm
deleted file mode 100644
index aa4cbaa2ad..0000000000
--- a/contrib/buildsystems/Generators.pm
+++ /dev/null
@@ -1,42 +0,0 @@
-package Generators;
-require Exporter;
-
-use strict;
-use File::Basename;
-no strict 'refs';
-use vars qw($VERSION @AVAILABLE);
-
-our $VERSION = '1.00';
-our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
-@ISA = qw(Exporter);
-
-BEGIN {
- local(*D);
- my $me = $INC{"Generators.pm"};
- die "Couldn't find myself in \@INC, which is required to load the generators!" if ("$me" eq "");
- $me = dirname($me);
- if (opendir(D,"$me/Generators")) {
- foreach my $gen (readdir(D)) {
- next unless ($gen =~ /\.pm$/);
- require "${me}/Generators/$gen";
- $gen =~ s,\.pm,,;
- push(@AVAILABLE, $gen);
- }
- closedir(D);
- my $gens = join(', ', @AVAILABLE);
- }
-
- push @EXPORT_OK, qw(available);
-}
-
-sub available {
- return @AVAILABLE;
-}
-
-sub generate {
- my ($gen, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- return eval("Generators::${gen}::generate(\$git_dir, \$out_dir, \$rel_dir, \%build_structure)") if grep(/^$gen$/, @AVAILABLE);
- die "Generator \"${gen}\" is not available!\nAvailable generators are: @AVAILABLE\n";
-}
-
-1;
diff --git a/contrib/buildsystems/Generators/QMake.pm b/contrib/buildsystems/Generators/QMake.pm
deleted file mode 100644
index ff3b657e61..0000000000
--- a/contrib/buildsystems/Generators/QMake.pm
+++ /dev/null
@@ -1,189 +0,0 @@
-package Generators::QMake;
-require Exporter;
-
-use strict;
-use vars qw($VERSION);
-
-our $VERSION = '1.00';
-our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
-@ISA = qw(Exporter);
-
-BEGIN {
- push @EXPORT_OK, qw(generate);
-}
-
-sub generate {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
-
- my @libs = @{$build_structure{"LIBS"}};
- foreach (@libs) {
- createLibProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
- }
-
- my @apps = @{$build_structure{"APPS"}};
- foreach (@apps) {
- createAppProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
- }
-
- createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
- return 0;
-}
-
-sub createLibProject {
- my ($libname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- print "Generate $libname lib project\n";
- $rel_dir = "../$rel_dir";
-
- my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_SOURCES"}})));
- my $defines = join(" \\\n\t", sort(@{$build_structure{"LIBS_${libname}_DEFINES"}}));
- my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_INCLUDES"}})));
- my $cflags = join(" ", sort(@{$build_structure{"LIBS_${libname}_CFLAGS"}}));
-
- my $cflags_debug = $cflags;
- $cflags_debug =~ s/-MT/-MTd/;
- $cflags_debug =~ s/-O.//;
-
- my $cflags_release = $cflags;
- $cflags_release =~ s/-MTd/-MT/;
-
- my @tmp = @{$build_structure{"LIBS_${libname}_LFLAGS"}};
- my @tmp2 = ();
- foreach (@tmp) {
- if (/^-LTCG/) {
- } elsif (/^-L/) {
- $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
- }
- push(@tmp2, $_);
- }
- my $lflags = join(" ", sort(@tmp));
-
- my $target = $libname;
- $target =~ s/\//_/g;
- $defines =~ s/-D//g;
- $defines =~ s/"/\\\\"/g;
- $includes =~ s/-I//g;
- mkdir "$target" || die "Could not create the directory $target for lib project!\n";
- open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
- print F << "EOM";
-TEMPLATE = lib
-TARGET = $target
-DESTDIR = $rel_dir
-
-CONFIG -= qt
-CONFIG += static
-
-QMAKE_CFLAGS =
-QMAKE_CFLAGS_RELEASE = $cflags_release
-QMAKE_CFLAGS_DEBUG = $cflags_debug
-QMAKE_LIBFLAGS = $lflags
-
-DEFINES += \\
- $defines
-
-INCLUDEPATH += \\
- $includes
-
-SOURCES += \\
- $sources
-EOM
- close F;
-}
-
-sub createAppProject {
- my ($appname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- print "Generate $appname app project\n";
- $rel_dir = "../$rel_dir";
-
- my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_SOURCES"}})));
- my $defines = join(" \\\n\t", sort(@{$build_structure{"APPS_${appname}_DEFINES"}}));
- my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_INCLUDES"}})));
- my $cflags = join(" ", sort(@{$build_structure{"APPS_${appname}_CFLAGS"}}));
-
- my $cflags_debug = $cflags;
- $cflags_debug =~ s/-MT/-MTd/;
- $cflags_debug =~ s/-O.//;
-
- my $cflags_release = $cflags;
- $cflags_release =~ s/-MTd/-MT/;
-
- my $libs;
- foreach (sort(@{$build_structure{"APPS_${appname}_LIBS"}})) {
- $_ =~ s/\//_/g;
- $libs .= " $_";
- }
- my @tmp = @{$build_structure{"APPS_${appname}_LFLAGS"}};
- my @tmp2 = ();
- foreach (@tmp) {
- # next if ($_ eq "-NODEFAULTLIB:MSVCRT.lib");
- if (/^-LTCG/) {
- } elsif (/^-L/) {
- $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
- }
- push(@tmp2, $_);
- }
- my $lflags = join(" ", sort(@tmp));
-
- my $target = $appname;
- $target =~ s/\.exe//;
- $target =~ s/\//_/g;
- $defines =~ s/-D//g;
- $defines =~ s/"/\\\\"/g;
- $includes =~ s/-I//g;
- mkdir "$target" || die "Could not create the directory $target for app project!\n";
- open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
- print F << "EOM";
-TEMPLATE = app
-TARGET = $target
-DESTDIR = $rel_dir
-
-CONFIG -= qt embed_manifest_exe
-CONFIG += console
-
-QMAKE_CFLAGS =
-QMAKE_CFLAGS_RELEASE = $cflags_release
-QMAKE_CFLAGS_DEBUG = $cflags_debug
-QMAKE_LFLAGS = $lflags
-LIBS = $libs
-
-DEFINES += \\
- $defines
-
-INCLUDEPATH += \\
- $includes
-
-win32:QMAKE_LFLAGS += -LIBPATH:$rel_dir
-else: QMAKE_LFLAGS += -L$rel_dir
-
-SOURCES += \\
- $sources
-EOM
- close F;
-}
-
-sub createGlueProject {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- my $libs = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"LIBS"}}));
- my $apps = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"APPS"}}));
- $libs =~ s/\.a//g;
- $libs =~ s/\//_/g;
- $libs =~ s/\|/\//g;
- $apps =~ s/\.exe//g;
- $apps =~ s/\//_/g;
- $apps =~ s/\|/\//g;
-
- my $filename = $out_dir;
- $filename =~ s/.*\/([^\/]+)$/$1/;
- $filename =~ s/\/$//;
- print "Generate glue project $filename.pro\n";
- open F, ">$filename.pro" || die "Could not open $filename.pro for writing!\n";
- print F << "EOM";
-TEMPLATE = subdirs
-CONFIG += ordered
-SUBDIRS += \\
-$libs \\
-$apps
-EOM
- close F;
-}
-
-1;
diff --git a/contrib/buildsystems/Generators/Vcproj.pm b/contrib/buildsystems/Generators/Vcproj.pm
deleted file mode 100644
index 737647e76a..0000000000
--- a/contrib/buildsystems/Generators/Vcproj.pm
+++ /dev/null
@@ -1,579 +0,0 @@
-package Generators::Vcproj;
-require Exporter;
-
-use strict;
-use vars qw($VERSION);
-use Digest::SHA qw(sha256_hex);
-
-our $VERSION = '1.00';
-our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
-@ISA = qw(Exporter);
-
-BEGIN {
- push @EXPORT_OK, qw(generate);
-}
-
-sub generate_guid ($) {
- my $hex = sha256_hex($_[0]);
- $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
- $hex =~ tr/a-z/A-Z/;
- return $hex;
-}
-
-sub generate {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- my @libs = @{$build_structure{"LIBS"}};
- foreach (@libs) {
- createLibProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
- }
-
- my @apps = @{$build_structure{"APPS"}};
- foreach (@apps) {
- createAppProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
- }
-
- createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
- return 0;
-}
-
-sub createLibProject {
- my ($libname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
- print "Generate $libname vcproj lib project\n";
- $rel_dir = "..\\$rel_dir";
- $rel_dir =~ s/\//\\/g;
-
- my $target = $libname;
- $target =~ s/\//_/g;
- $target =~ s/\.a//;
-
- my $uuid = generate_guid($libname);
- $$build_structure{"LIBS_${target}_GUID"} = $uuid;
-
- my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"LIBS_${libname}_SOURCES"}}));
- my @sources;
- foreach (@srcs) {
- $_ =~ s/\//\\/g;
- push(@sources, $_);
- }
- my $defines = join(",", sort(@{$$build_structure{"LIBS_${libname}_DEFINES"}}));
- my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"LIBS_${libname}_INCLUDES"}})));
- my $cflags = join(" ", sort(@{$$build_structure{"LIBS_${libname}_CFLAGS"}}));
- $cflags =~ s/\"/&quot;/g;
- $cflags =~ s/</&lt;/g;
- $cflags =~ s/>/&gt;/g;
-
- my $cflags_debug = $cflags;
- $cflags_debug =~ s/-MT/-MTd/;
- $cflags_debug =~ s/-O.//;
-
- my $cflags_release = $cflags;
- $cflags_release =~ s/-MTd/-MT/;
-
- my @tmp = @{$$build_structure{"LIBS_${libname}_LFLAGS"}};
- my @tmp2 = ();
- foreach (@tmp) {
- if (/^-LTCG/) {
- } elsif (/^-L/) {
- $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
- }
- push(@tmp2, $_);
- }
- my $lflags = join(" ", sort(@tmp));
-
- $defines =~ s/-D//g;
- $defines =~ s/\"/\\&quot;/g;
- $defines =~ s/</&lt;/g;
- $defines =~ s/>/&gt;/g;
- $defines =~ s/\'//g;
- $includes =~ s/-I//g;
- mkdir "$target" || die "Could not create the directory $target for lib project!\n";
- open F, ">$target/$target.vcproj" || die "Could not open $target/$target.pro for writing!\n";
- binmode F, ":crlf";
- print F << "EOM";
-<?xml version="1.0" encoding = "Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9,00"
- Name="$target"
- ProjectGUID="$uuid">
- <Platforms>
- <Platform
- Name="Win32"/>
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$rel_dir"
- ConfigurationType="4"
- CharacterSet="0"
- IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- AdditionalOptions="$cflags_debug"
- Optimization="0"
- InlineFunctionExpansion="1"
- AdditionalIncludeDirectories="$includes"
- PreprocessorDefinitions="WIN32,_DEBUG,$defines"
- MinimalRebuild="true"
- RuntimeLibrary="1"
- UsePrecompiledHeader="0"
- ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLibrarianTool"
- SuppressStartupBanner="true"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$rel_dir"
- ConfigurationType="4"
- CharacterSet="0"
- WholeProgramOptimization="1"
- IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- AdditionalOptions="$cflags_release"
- Optimization="2"
- InlineFunctionExpansion="1"
- EnableIntrinsicFunctions="true"
- AdditionalIncludeDirectories="$includes"
- PreprocessorDefinitions="WIN32,NDEBUG,$defines"
- RuntimeLibrary="0"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="0"
- ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLibrarianTool"
- SuppressStartupBanner="true"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <Files>
- <Filter
- Name="Source Files"
- Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
-EOM
- foreach(@sources) {
- print F << "EOM";
- <File
- RelativePath="$_"/>
-EOM
- }
- print F << "EOM";
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
-EOM
- close F;
-}
-
-sub createAppProject {
- my ($appname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
- print "Generate $appname vcproj app project\n";
- $rel_dir = "..\\$rel_dir";
- $rel_dir =~ s/\//\\/g;
-
- my $target = $appname;
- $target =~ s/\//_/g;
- $target =~ s/\.exe//;
-
- my $uuid = generate_guid($appname);
- $$build_structure{"APPS_${target}_GUID"} = $uuid;
-
- my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"APPS_${appname}_SOURCES"}}));
- my @sources;
- foreach (@srcs) {
- $_ =~ s/\//\\/g;
- push(@sources, $_);
- }
- my $defines = join(",", sort(@{$$build_structure{"APPS_${appname}_DEFINES"}}));
- my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"APPS_${appname}_INCLUDES"}})));
- my $cflags = join(" ", sort(@{$$build_structure{"APPS_${appname}_CFLAGS"}}));
- $cflags =~ s/\"/&quot;/g;
- $cflags =~ s/</&lt;/g;
- $cflags =~ s/>/&gt;/g;
-
- my $cflags_debug = $cflags;
- $cflags_debug =~ s/-MT/-MTd/;
- $cflags_debug =~ s/-O.//;
-
- my $cflags_release = $cflags;
- $cflags_release =~ s/-MTd/-MT/;
-
- my $libs;
- foreach (sort(@{$$build_structure{"APPS_${appname}_LIBS"}})) {
- $_ =~ s/\//_/g;
- $libs .= " $_";
- }
- my @tmp = @{$$build_structure{"APPS_${appname}_LFLAGS"}};
- my @tmp2 = ();
- foreach (@tmp) {
- if (/^-LTCG/) {
- } elsif (/^-L/) {
- $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
- }
- push(@tmp2, $_);
- }
- my $lflags = join(" ", sort(@tmp)) . " -LIBPATH:$rel_dir";
-
- $defines =~ s/-D//g;
- $defines =~ s/\"/\\&quot;/g;
- $defines =~ s/</&lt;/g;
- $defines =~ s/>/&gt;/g;
- $defines =~ s/\'//g;
- $defines =~ s/\\\\/\\/g;
- $includes =~ s/-I//g;
- mkdir "$target" || die "Could not create the directory $target for lib project!\n";
- open F, ">$target/$target.vcproj" || die "Could not open $target/$target.pro for writing!\n";
- binmode F, ":crlf";
- print F << "EOM";
-<?xml version="1.0" encoding = "Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9,00"
- Name="$target"
- ProjectGUID="$uuid">
- <Platforms>
- <Platform
- Name="Win32"/>
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$rel_dir"
- ConfigurationType="1"
- CharacterSet="0"
- IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- AdditionalOptions="$cflags_debug"
- Optimization="0"
- InlineFunctionExpansion="1"
- AdditionalIncludeDirectories="$includes"
- PreprocessorDefinitions="WIN32,_DEBUG,$defines"
- MinimalRebuild="true"
- RuntimeLibrary="1"
- UsePrecompiledHeader="0"
- ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- AdditionalDependencies="$libs"
- AdditionalOptions="$lflags"
- LinkIncremental="2"
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$rel_dir"
- ConfigurationType="1"
- CharacterSet="0"
- WholeProgramOptimization="1"
- IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- AdditionalOptions="$cflags_release"
- Optimization="2"
- InlineFunctionExpansion="1"
- EnableIntrinsicFunctions="true"
- AdditionalIncludeDirectories="$includes"
- PreprocessorDefinitions="WIN32,NDEBUG,$defines"
- RuntimeLibrary="0"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="0"
- ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- AdditionalDependencies="$libs"
- AdditionalOptions="$lflags"
- LinkIncremental="1"
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- OptimizeReferences="2"
- EnableCOMDATFolding="2"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <Files>
- <Filter
- Name="Source Files"
- Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
-EOM
- foreach(@sources) {
- print F << "EOM";
- <File
- RelativePath="$_"/>
-EOM
- }
- print F << "EOM";
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
-EOM
- close F;
-}
-
-sub createGlueProject {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- print "Generate solutions file\n";
- $rel_dir = "..\\$rel_dir";
- $rel_dir =~ s/\//\\/g;
- my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 10.00\n# Visual Studio 2008\n";
- my $SLN_PRE = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
- my $SLN_POST = "\nEndProject\n";
-
- my @libs = @{$build_structure{"LIBS"}};
- my @tmp;
- foreach (@libs) {
- $_ =~ s/\//_/g;
- $_ =~ s/\.a//;
- push(@tmp, $_);
- }
- @libs = @tmp;
-
- my @apps = @{$build_structure{"APPS"}};
- @tmp = ();
- foreach (@apps) {
- $_ =~ s/\//_/g;
- $_ =~ s/\.exe//;
- if ($_ eq "git" ) {
- unshift(@tmp, $_);
- } else {
- push(@tmp, $_);
- }
- }
- @apps = @tmp;
-
- open F, ">git.sln" || die "Could not open git.sln for writing!\n";
- binmode F, ":crlf";
- print F "$SLN_HEAD";
-
- my $uuid_libgit = $build_structure{"LIBS_libgit_GUID"};
- my $uuid_xdiff_lib = $build_structure{"LIBS_xdiff_lib_GUID"};
- foreach (@apps) {
- my $appname = $_;
- my $uuid = $build_structure{"APPS_${appname}_GUID"};
- print F "$SLN_PRE";
- print F "\"${appname}\", \"${appname}\\${appname}.vcproj\", \"${uuid}\"\n";
- print F " ProjectSection(ProjectDependencies) = postProject\n";
- print F " ${uuid_libgit} = ${uuid_libgit}\n";
- print F " ${uuid_xdiff_lib} = ${uuid_xdiff_lib}\n";
- print F " EndProjectSection";
- print F "$SLN_POST";
- }
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
- print F "$SLN_PRE";
- print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\"";
- print F "$SLN_POST";
- }
-
- print F << "EOM";
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Win32 = Debug|Win32
- Release|Win32 = Release|Win32
- EndGlobalSection
-EOM
- print F << "EOM";
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
-EOM
- foreach (@apps) {
- my $appname = $_;
- my $uuid = $build_structure{"APPS_${appname}_GUID"};
- print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
- print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
- print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
- print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
- }
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
- print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
- print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
- print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
- print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
- }
-
- print F << "EOM";
- EndGlobalSection
-EndGlobal
-EOM
- close F;
-}
-
-1;
diff --git a/contrib/buildsystems/Generators/Vcxproj.pm b/contrib/buildsystems/Generators/Vcxproj.pm
deleted file mode 100644
index b2e68a1671..0000000000
--- a/contrib/buildsystems/Generators/Vcxproj.pm
+++ /dev/null
@@ -1,402 +0,0 @@
-package Generators::Vcxproj;
-require Exporter;
-
-use strict;
-use vars qw($VERSION);
-use Digest::SHA qw(sha256_hex);
-
-our $VERSION = '1.00';
-our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
-@ISA = qw(Exporter);
-
-BEGIN {
- push @EXPORT_OK, qw(generate);
-}
-
-sub generate_guid ($) {
- my $hex = sha256_hex($_[0]);
- $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
- $hex =~ tr/a-z/A-Z/;
- return $hex;
-}
-
-sub generate {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- my @libs = @{$build_structure{"LIBS"}};
- foreach (@libs) {
- createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 1);
- }
-
- my @apps = @{$build_structure{"APPS"}};
- foreach (@apps) {
- createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 0);
- }
-
- createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
- return 0;
-}
-
-sub createProject {
- my ($name, $git_dir, $out_dir, $rel_dir, $build_structure, $static_library) = @_;
- my $label = $static_library ? "lib" : "app";
- my $prefix = $static_library ? "LIBS_" : "APPS_";
- my $config_type = $static_library ? "StaticLibrary" : "Application";
- print "Generate $name vcxproj $label project\n";
- my $cdup = $name;
- $cdup =~ s/[^\/]+/../g;
- $cdup =~ s/\//\\/g;
- $rel_dir = $rel_dir eq "." ? $cdup : "$cdup\\$rel_dir";
- $rel_dir =~ s/\//\\/g;
-
- my $target = $name;
- if ($static_library) {
- $target =~ s/\.a//;
- } else {
- $target =~ s/\.exe//;
- }
-
- my $uuid = generate_guid($name);
- $$build_structure{"$prefix${target}_GUID"} = $uuid;
- my $vcxproj = $target;
- $vcxproj =~ s/(.*\/)?(.*)/$&\/$2.vcxproj/;
- $vcxproj =~ s/([^\/]*)(\/lib)\/(lib.vcxproj)/$1$2\/$1_$3/;
- $$build_structure{"$prefix${target}_VCXPROJ"} = $vcxproj;
-
- my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"$prefix${name}_SOURCES"}}));
- my @sources;
- foreach (@srcs) {
- $_ =~ s/\//\\/g;
- push(@sources, $_);
- }
- my $defines = join(";", sort(@{$$build_structure{"$prefix${name}_DEFINES"}}));
- my $includes= join(";", sort(map { s/^-I//; s/\//\\/g; File::Spec->file_name_is_absolute($_) ? $_ : "$rel_dir\\$_" } @{$$build_structure{"$prefix${name}_INCLUDES"}}));
- my $cflags = join(" ", sort(map { s/^-[GLMOWZ].*//; s/.* .*/"$&"/; $_; } @{$$build_structure{"$prefix${name}_CFLAGS"}}));
- $cflags =~ s/</&lt;/g;
- $cflags =~ s/>/&gt;/g;
-
- my $libs_release = "\n ";
- my $libs_debug = "\n ";
- if (!$static_library && $name ne 'headless-git') {
- $libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib|reftable\/libreftable\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}}));
- $libs_debug = $libs_release;
- $libs_debug =~ s/zlib\.lib/zlibd\.lib/g;
- $libs_debug =~ s/libexpat\.lib/libexpatd\.lib/g;
- $libs_debug =~ s/libcurl\.lib/libcurl-d\.lib/g;
- }
-
- $defines =~ s/-D//g;
- $defines =~ s/</&lt;/g;
- $defines =~ s/>/&gt;/g;
- $defines =~ s/\'//g;
-
- die "Could not create the directory $target for $label project!\n" unless (-d "$target" || mkdir "$target");
-
- open F, ">$vcxproj" or die "Could not open $vcxproj for writing!\n";
- binmode F, ":crlf :utf8";
- print F chr(0xFEFF);
- print F << "EOM";
-<?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
- <ItemGroup Label="ProjectConfigurations">
- <ProjectConfiguration Include="Debug|Win32">
- <Configuration>Debug</Configuration>
- <Platform>Win32</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Release|Win32">
- <Configuration>Release</Configuration>
- <Platform>Win32</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Debug|x64">
- <Configuration>Debug</Configuration>
- <Platform>x64</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Release|x64">
- <Configuration>Release</Configuration>
- <Platform>x64</Platform>
- </ProjectConfiguration>
- </ItemGroup>
- <PropertyGroup Label="Globals">
- <ProjectGuid>$uuid</ProjectGuid>
- <Keyword>Win32Proj</Keyword>
- <VCPKGArch Condition="'\$(Platform)'=='Win32'">x86-windows</VCPKGArch>
- <VCPKGArch Condition="'\$(Platform)'!='Win32'">x64-windows</VCPKGArch>
- <VCPKGArchDirectory>$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)</VCPKGArchDirectory>
- <VCPKGBinDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
- <VCPKGLibDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
- <VCPKGBinDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
- <VCPKGLibDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
- <VCPKGIncludeDirectory>\$(VCPKGArchDirectory)\\include</VCPKGIncludeDirectory>
- <VCPKGLibs Condition="'\$(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
- <VCPKGLibs Condition="'\$(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
- </PropertyGroup>
- <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />
- <PropertyGroup Condition="'\$(Configuration)'=='Debug'" Label="Configuration">
- <UseDebugLibraries>true</UseDebugLibraries>
- <LinkIncremental>true</LinkIncremental>
- </PropertyGroup>
- <PropertyGroup Condition="'\$(Configuration)'=='Release'" Label="Configuration">
- <UseDebugLibraries>false</UseDebugLibraries>
- <WholeProgramOptimization>true</WholeProgramOptimization>
- </PropertyGroup>
- <PropertyGroup>
- <ConfigurationType>$config_type</ConfigurationType>
- <PlatformToolset>v140</PlatformToolset>
- <!-- <CharacterSet>UTF-8</CharacterSet> -->
- <OutDir>..\\</OutDir>
- <!-- <IntDir>\$(ProjectDir)\$(Configuration)\\</IntDir> -->
- </PropertyGroup>
- <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.props" />
- <ImportGroup Label="ExtensionSettings">
- </ImportGroup>
- <ImportGroup Label="Shared">
- </ImportGroup>
- <ImportGroup Label="PropertySheets">
- <Import Project="\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props" Condition="exists('\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- <PropertyGroup Label="UserMacros" />
- <PropertyGroup>
- <GenerateManifest>false</GenerateManifest>
- <EnableManagedIncrementalBuild>true</EnableManagedIncrementalBuild>
- </PropertyGroup>
- <ItemDefinitionGroup>
- <ClCompile>
- <AdditionalOptions>$cflags %(AdditionalOptions)</AdditionalOptions>
- <AdditionalIncludeDirectories>$cdup;$cdup\\compat;$cdup\\compat\\regex;$cdup\\compat\\win32;$cdup\\compat\\poll;$cdup\\compat\\vcbuild\\include;\$(VCPKGIncludeDirectory);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <EnableParallelCodeGeneration />
- <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion>
- <PrecompiledHeader />
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
- </ClCompile>
- <Lib>
- <SuppressStartupBanner>true</SuppressStartupBanner>
- </Lib>
- <Link>
- <AdditionalLibraryDirectories>\$(VCPKGLibDirectory);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
- <AdditionalDependencies>\$(VCPKGLibs);\$(AdditionalDependencies)</AdditionalDependencies>
- <AdditionalOptions>invalidcontinue.obj %(AdditionalOptions)</AdditionalOptions>
- <EntryPointSymbol>wmainCRTStartup</EntryPointSymbol>
- <ManifestFile>$cdup\\compat\\win32\\git.manifest</ManifestFile>
- <SubSystem>Console</SubSystem>
- </Link>
-EOM
- if ($target eq 'libgit') {
- print F << "EOM";
- <PreBuildEvent Condition="!Exists('$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)\\include\\openssl\\ssl.h')">
- <Message>Initialize VCPKG</Message>
- <Command>del "$cdup\\compat\\vcbuild\\vcpkg"</Command>
- <Command>call "$cdup\\compat\\vcbuild\\vcpkg_install.bat"</Command>
- </PreBuildEvent>
-EOM
- }
- print F << "EOM";
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'\$(Platform)'=='Win32'">
- <Link>
- <TargetMachine>MachineX86</TargetMachine>
- </Link>
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'\$(Configuration)'=='Debug'">
- <ClCompile>
- <Optimization>Disabled</Optimization>
- <PreprocessorDefinitions>WIN32;_DEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
- </ClCompile>
- <Link>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- </Link>
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'\$(Configuration)'=='Release'">
- <ClCompile>
- <Optimization>MaxSpeed</Optimization>
- <IntrinsicFunctions>true</IntrinsicFunctions>
- <PreprocessorDefinitions>WIN32;NDEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
- <FunctionLevelLinking>true</FunctionLevelLinking>
- <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
- </ClCompile>
- <Link>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- <EnableCOMDATFolding>true</EnableCOMDATFolding>
- <OptimizeReferences>true</OptimizeReferences>
- </Link>
- </ItemDefinitionGroup>
- <ItemGroup>
-EOM
- foreach(@sources) {
- print F << "EOM";
- <ClCompile Include="$_" />
-EOM
- }
- print F << "EOM";
- </ItemGroup>
-EOM
- if ((!$static_library || $target =~ 'vcs-svn' || $target =~ 'xdiff') && !($name =~ /headless-git/)) {
- my $uuid_libgit = $$build_structure{"LIBS_libgit_GUID"};
- my $uuid_libreftable = $$build_structure{"LIBS_reftable/libreftable_GUID"};
- my $uuid_xdiff_lib = $$build_structure{"LIBS_xdiff/lib_GUID"};
-
- print F << "EOM";
- <ItemGroup>
- <ProjectReference Include="$cdup\\libgit\\libgit.vcxproj">
- <Project>$uuid_libgit</Project>
- <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
- </ProjectReference>
-EOM
- if (!($name =~ /xdiff|libreftable/)) {
- print F << "EOM";
- <ProjectReference Include="$cdup\\reftable\\libreftable\\libreftable.vcxproj">
- <Project>$uuid_libreftable</Project>
- <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
- </ProjectReference>
-EOM
- }
- if (!($name =~ 'xdiff')) {
- print F << "EOM";
- <ProjectReference Include="$cdup\\xdiff\\lib\\xdiff_lib.vcxproj">
- <Project>$uuid_xdiff_lib</Project>
- <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
- </ProjectReference>
-EOM
- }
- if ($name =~ /(test-(line-buffer|svn-fe)|^git-remote-testsvn)\.exe$/) {
- my $uuid_vcs_svn_lib = $$build_structure{"LIBS_vcs-svn/lib_GUID"};
- print F << "EOM";
- <ProjectReference Include="$cdup\\vcs-svn\\lib\\vcs-svn_lib.vcxproj">
- <Project>$uuid_vcs_svn_lib</Project>
- <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
- </ProjectReference>
-EOM
- }
- print F << "EOM";
- </ItemGroup>
-EOM
- }
- print F << "EOM";
- <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.targets" />
-EOM
- if (!$static_library) {
- print F << "EOM";
- <Target Name="${target}_AfterBuild" AfterTargets="AfterBuild">
- <ItemGroup>
- <DLLsAndPDBs Include="\$(VCPKGBinDirectory)\\*.dll;\$(VCPKGBinDirectory)\\*.pdb" />
- </ItemGroup>
- <Copy SourceFiles="@(DLLsAndPDBs)" DestinationFolder="\$(OutDir)" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />
- <MakeDir Directories="..\\templates\\blt\\branches" />
- </Target>
-EOM
- }
- if ($target eq 'git') {
- print F " <Import Project=\"LinkOrCopyBuiltins.targets\" />\n";
- }
- if ($target eq 'git-remote-http') {
- print F " <Import Project=\"LinkOrCopyRemoteHttp.targets\" />\n";
- }
- print F << "EOM";
-</Project>
-EOM
- close F;
-}
-
-sub createGlueProject {
- my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
- print "Generate solutions file\n";
- $rel_dir = "..\\$rel_dir";
- $rel_dir =~ s/\//\\/g;
- my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 11.00\n# Visual Studio 2010\n";
- my $SLN_PRE = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
- my $SLN_POST = "\nEndProject\n";
-
- my @libs = @{$build_structure{"LIBS"}};
- my @tmp;
- foreach (@libs) {
- $_ =~ s/\.a//;
- push(@tmp, $_);
- }
- @libs = @tmp;
-
- my @apps = @{$build_structure{"APPS"}};
- @tmp = ();
- foreach (@apps) {
- $_ =~ s/\.exe//;
- if ($_ eq "git" ) {
- unshift(@tmp, $_);
- } else {
- push(@tmp, $_);
- }
- }
- @apps = @tmp;
-
- open F, ">git.sln" || die "Could not open git.sln for writing!\n";
- binmode F, ":crlf :utf8";
- print F chr(0xFEFF);
- print F "$SLN_HEAD";
-
- foreach (@apps) {
- my $appname = $_;
- my $uuid = $build_structure{"APPS_${appname}_GUID"};
- print F "$SLN_PRE";
- my $vcxproj = $build_structure{"APPS_${appname}_VCXPROJ"};
- $vcxproj =~ s/\//\\/g;
- $appname =~ s/.*\///;
- print F "\"${appname}\", \"${vcxproj}\", \"${uuid}\"";
- print F "$SLN_POST";
- }
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
- print F "$SLN_PRE";
- my $vcxproj = $build_structure{"LIBS_${libname}_VCXPROJ"};
- $vcxproj =~ s/\//\\/g;
- $libname =~ s/\//_/g;
- print F "\"${libname}\", \"${vcxproj}\", \"${uuid}\"";
- print F "$SLN_POST";
- }
-
- print F << "EOM";
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|x64 = Debug|x64
- Debug|x86 = Debug|x86
- Release|x64 = Release|x64
- Release|x86 = Release|x86
- EndGlobalSection
-EOM
- print F << "EOM";
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
-EOM
- foreach (@apps) {
- my $appname = $_;
- my $uuid = $build_structure{"APPS_${appname}_GUID"};
- print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
- print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
- print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
- print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
- print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
- print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
- print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
- print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
- }
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
- print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
- print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
- print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
- print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
- print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
- print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
- print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
- print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
- }
-
- print F << "EOM";
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
-EOM
- close F;
-}
-
-1;
diff --git a/contrib/buildsystems/engine.pl b/contrib/buildsystems/engine.pl
deleted file mode 100755
index 069be7e4be..0000000000
--- a/contrib/buildsystems/engine.pl
+++ /dev/null
@@ -1,395 +0,0 @@
-#!/usr/bin/perl -w
-######################################################################
-# Do not call this script directly!
-#
-# The generate script ensures that @INC is correct before the engine
-# is executed.
-#
-# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
-######################################################################
-use strict;
-use File::Basename;
-use File::Spec;
-use Cwd;
-use Generators;
-use Text::ParseWords;
-
-my (%build_structure, %compile_options, @makedry);
-my $out_dir = getcwd();
-my $git_dir = $out_dir;
-$git_dir =~ s=\\=/=g;
-$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
-die "Couldn't find Git repo" if ("$git_dir" eq "");
-
-my @gens = Generators::available();
-my $gen = "Vcproj";
-
-sub showUsage
-{
- my $genlist = join(', ', @gens);
- print << "EOM";
-generate usage:
- -g <GENERATOR> --gen <GENERATOR> Specify the buildsystem generator (default: $gen)
- Available: $genlist
- -o <PATH> --out <PATH> Specify output directory generation (default: .)
- --make-out <PATH> Write the output of GNU Make into a file
- -i <FILE> --in <FILE> Specify input file, instead of running GNU Make
- -h,-? --help This help
-EOM
- exit 0;
-}
-
-# Parse command-line options
-my $make_out;
-while (@ARGV) {
- my $arg = shift @ARGV;
- if ("$arg" eq "-h" || "$arg" eq "--help" || "$arg" eq "-?") {
- showUsage();
- exit(0);
- } elsif("$arg" eq "--out" || "$arg" eq "-o") {
- $out_dir = shift @ARGV;
- } elsif("$arg" eq "--make-out") {
- $make_out = shift @ARGV;
- } elsif("$arg" eq "--gen" || "$arg" eq "-g") {
- $gen = shift @ARGV;
- } elsif("$arg" eq "--in" || "$arg" eq "-i") {
- my $infile = shift @ARGV;
- open(F, "<$infile") || die "Couldn't open file $infile";
- @makedry = <F>;
- close(F);
- } else {
- die "Unknown option: " . $arg;
- }
-}
-
-# NOT using File::Spec->rel2abs($path, $base) here, as
-# it fails badly for me in the msysgit environment
-$git_dir = File::Spec->rel2abs($git_dir);
-$out_dir = File::Spec->rel2abs($out_dir);
-my $rel_dir = makeOutRel2Git($git_dir, $out_dir);
-
-# Print some information so the user feels informed
-print << "EOM";
------
-Generator: $gen
-Git dir: $git_dir
-Out dir: $out_dir
------
-Running GNU Make to figure out build structure...
-EOM
-
-# Pipe a make --dry-run into a variable, if not already loaded from file
-# Capture the make dry stderr to file for review (will be empty for a release build).
-
-my $ErrsFile = "msvc-build-makedryerrors.txt";
-@makedry = `make -C $git_dir -n MSVC=1 SKIP_VCPKG=1 V=1 2>$ErrsFile`
-if !@makedry;
-# test for an empty Errors file and remove it
-unlink $ErrsFile if -f -z $ErrsFile;
-
-if (defined $make_out) {
- open OUT, ">" . $make_out;
- print OUT @makedry;
- close OUT;
-}
-
-# Parse the make output into usable info
-parseMakeOutput();
-
-# Finally, ask the generator to start generating..
-Generators::generate($gen, $git_dir, $out_dir, $rel_dir, %build_structure);
-
-# main flow ends here
-# -------------------------------------------------------------------------------------------------
-
-
-# 1) path: /foo/bar/baz 2) path: /foo/bar/baz 3) path: /foo/bar/baz
-# base: /foo/bar/baz/temp base: /foo/bar base: /tmp
-# rel: .. rel: baz rel: ../foo/bar/baz
-sub makeOutRel2Git
-{
- my ($path, $base) = @_;
- my $rel;
- if ("$path" eq "$base") {
- return ".";
- } elsif ($base =~ /^$path/) {
- # case 1
- my $tmp = $base;
- $tmp =~ s/^$path//;
- foreach (split('/', $tmp)) {
- $rel .= "../" if ("$_" ne "");
- }
- } elsif ($path =~ /^$base/) {
- # case 2
- $rel = $path;
- $rel =~ s/^$base//;
- $rel = "./$rel";
- } else {
- my $tmp = $base;
- foreach (split('/', $tmp)) {
- $rel .= "../" if ("$_" ne "");
- }
- $rel .= $path;
- }
- $rel =~ s/\/\//\//g; # simplify
- $rel =~ s/\/$//; # don't end with /
- return $rel;
-}
-
-sub parseMakeOutput
-{
- print "Parsing GNU Make output to figure out build structure...\n";
- my $line = 0;
- while (my $text = shift @makedry) {
- my $ate_next;
- do {
- $ate_next = 0;
- $line++;
- chomp $text;
- chop $text if ($text =~ /\r$/);
- if ($text =~ /\\$/) {
- $text =~ s/\\$//;
- $text .= shift @makedry;
- $ate_next = 1;
- }
- } while($ate_next);
-
- if ($text =~ /^test /) {
- # options to test (eg -o) may be mistaken for linker options
- next;
- }
-
- if ($text =~ /^(mkdir|msgfmt) /) {
- # options to the Portable Object translations
- # the line "mkdir ... && msgfmt ..." contains no linker options
- next;
- }
-
- if($text =~ / -c /) {
- # compilation
- handleCompileLine($text, $line);
-
- } elsif ($text =~ / -o /) {
- # linking executable
- handleLinkLine($text, $line);
-
- } elsif ($text =~ /\.o / && $text =~ /\.a /) {
- # libifying
- handleLibLine($text, $line);
-#
-# } elsif ($text =~ /^cp /) {
-# # copy file around
-#
-# } elsif ($text =~ /^rm -f /) {
-# # shell command
-#
-# } elsif ($text =~ /^make[ \[]/) {
-# # make output
-#
-# } elsif ($text =~ /^echo /) {
-# # echo to file
-#
-# } elsif ($text =~ /^if /) {
-# # shell conditional
-#
-# } elsif ($text =~ /^tclsh /) {
-# # translation stuff
-#
-# } elsif ($text =~ /^umask /) {
-# # handling boilerplates
-#
-# } elsif ($text =~ /\$\(\:\)/) {
-# # ignore
-#
-# } elsif ($text =~ /^FLAGS=/) {
-# # flags check for dependencies
-#
-# } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
-# # perl commands for copying files
-#
-# } elsif ($text =~ /generate-cmdlist\.sh/) {
-# # command for generating list of commands
-#
-# } elsif ($text =~ /new locations or Tcl/) {
-# # command for detecting Tcl/Tk changes
-#
-# } elsif ($text =~ /mkdir -p/) {
-# # command creating path
-#
-# } elsif ($text =~ /: no custom templates yet/) {
-# # whatever
-#
-# } else {
-# print "Unhandled (line: $line): $text\n";
- }
- }
-
-# use Data::Dumper;
-# print "Parsed build structure:\n";
-# print Dumper(%build_structure);
-}
-
-# variables for the compilation part of each step
-my (@defines, @incpaths, @cflags, @sources);
-
-sub clearCompileStep
-{
- @defines = ();
- @incpaths = ();
- @cflags = ();
- @sources = ();
-}
-
-sub removeDuplicates
-{
- my (%dupHash, $entry);
- %dupHash = map { $_, 1 } @defines;
- @defines = keys %dupHash;
-
- %dupHash = map { $_, 1 } @incpaths;
- @incpaths = keys %dupHash;
-
- %dupHash = map { $_, 1 } @cflags;
- @cflags = keys %dupHash;
-}
-
-sub handleCompileLine
-{
- my ($line, $lineno) = @_;
- my @parts = shellwords($line);
- my $sourcefile;
- shift(@parts); # ignore cmd
- while (my $part = shift @parts) {
- if ("$part" eq "-o") {
- # ignore object file
- shift @parts;
- } elsif ("$part" eq "-c") {
- # ignore compile flag
- } elsif ("$part" eq "-c") {
- } elsif ($part =~ /^.?-I/) {
- push(@incpaths, $part);
- } elsif ($part =~ /^.?-D/) {
- push(@defines, $part);
- } elsif ($part =~ /^-/) {
- push(@cflags, $part);
- } elsif ($part =~ /\.(c|cc|cpp)$/) {
- $sourcefile = $part;
- } else {
- die "Unhandled compiler option @ line $lineno: $part";
- }
- }
- @{$compile_options{"${sourcefile}_CFLAGS"}} = @cflags;
- @{$compile_options{"${sourcefile}_DEFINES"}} = @defines;
- @{$compile_options{"${sourcefile}_INCPATHS"}} = @incpaths;
- clearCompileStep();
-}
-
-sub handleLibLine
-{
- my ($line, $lineno) = @_;
- my (@objfiles, @lflags, $libout, $part);
- # kill cmd and rm 'prefix'
- $line =~ s/^rm -f .* && .* rcs //;
- my @parts = shellwords($line);
- while ($part = shift @parts) {
- if ($part =~ /^-/) {
- push(@lflags, $part);
- } elsif ($part =~ /\.(o|obj)$/) {
- push(@objfiles, $part);
- } elsif ($part =~ /\.(a|lib)$/) {
- $libout = $part;
- $libout =~ s/\.a$//;
- } else {
- die "Unhandled lib option @ line $lineno: $part";
- }
- }
-# print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
-# exit(1);
- foreach (@objfiles) {
- my $sourcefile = $_;
- $sourcefile =~ s/\.o$/.c/;
- push(@sources, $sourcefile);
- push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
- push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
- push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
- }
- removeDuplicates();
-
- push(@{$build_structure{"LIBS"}}, $libout);
- @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
- "_OBJECTS");
- @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
- @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
- @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
- @{$build_structure{"LIBS_${libout}_LFLAGS"}} = @lflags;
- @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
- @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
- clearCompileStep();
-}
-
-sub handleLinkLine
-{
- my ($line, $lineno) = @_;
- my (@objfiles, @lflags, @libs, $appout, $part);
- my @parts = shellwords($line);
- shift(@parts); # ignore cmd
- while ($part = shift @parts) {
- if ($part =~ /^-IGNORE/) {
- push(@lflags, $part);
- } elsif ($part =~ /^-[GRIMDO]/) {
- # eat compiler flags
- } elsif ("$part" eq "-o") {
- $appout = shift @parts;
- } elsif ("$part" eq "-lz") {
- push(@libs, "zlib.lib");
- } elsif ("$part" eq "-lcrypto") {
- push(@libs, "libcrypto.lib");
- } elsif ("$part" eq "-lssl") {
- push(@libs, "libssl.lib");
- } elsif ("$part" eq "-lcurl") {
- push(@libs, "libcurl.lib");
- } elsif ("$part" eq "-lexpat") {
- push(@libs, "libexpat.lib");
- } elsif ("$part" eq "-liconv") {
- push(@libs, "iconv.lib");
- } elsif ($part =~ /^[-\/]/) {
- push(@lflags, $part);
- } elsif ($part =~ /\.(a|lib)$/) {
- $part =~ s/\.a$/.lib/;
- push(@libs, $part);
- } elsif ($part eq 'invalidcontinue.obj') {
- # ignore - known to MSVC
- } elsif ($part =~ /\.o$/) {
- push(@objfiles, $part);
- } elsif ($part =~ /\.obj$/) {
- # do nothing, 'make' should not be producing .obj, only .o files
- } else {
- die "Unhandled link option @ line $lineno: $part";
- }
- }
-# print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n";
-# exit(1);
- foreach (@objfiles) {
- my $sourcefile = $_;
- $sourcefile =~ s/^headless-git\.o$/compat\/win32\/headless.c/;
- $sourcefile =~ s/\.o$/.c/;
- push(@sources, $sourcefile);
- push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
- push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
- push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
- }
- removeDuplicates();
-
- removeDuplicates();
- push(@{$build_structure{"APPS"}}, $appout);
- @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
- "_SOURCES", "_OBJECTS", "_LIBS");
- @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
- @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
- @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
- @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
- @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
- @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
- @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
- clearCompileStep();
-}
diff --git a/contrib/buildsystems/generate b/contrib/buildsystems/generate
deleted file mode 100755
index bc10f25ff2..0000000000
--- a/contrib/buildsystems/generate
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl -w
-######################################################################
-# Generate buildsystem files
-#
-# This script generate buildsystem files based on the output of a
-# GNU Make --dry-run, enabling Windows users to develop Git with their
-# trusted IDE with native projects.
-#
-# Note:
-# It is not meant as *the* way of building Git with MSVC, but merely a
-# convenience. The correct way of building Git with MSVC is to use the
-# GNU Make tool to build with the maintained Makefile in the root of
-# the project. If you have the msysgit environment installed and
-# available in your current console, together with the Visual Studio
-# environment you wish to build for, all you have to do is run the
-# command:
-# make MSVC=1
-#
-# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
-######################################################################
-use strict;
-use File::Basename;
-use Cwd;
-
-my $git_dir = getcwd();
-$git_dir =~ s=\\=/=g;
-$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
-die "Couldn't find Git repo" if ("$git_dir" eq "");
-exec join(" ", ("PERL5LIB=${git_dir}/contrib/buildsystems ${git_dir}/contrib/buildsystems/engine.pl", @ARGV));
diff --git a/contrib/buildsystems/parse.pl b/contrib/buildsystems/parse.pl
deleted file mode 100755
index c9656ece99..0000000000
--- a/contrib/buildsystems/parse.pl
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/perl -w
-######################################################################
-# Do not call this script directly!
-#
-# The generate script ensures that @INC is correct before the engine
-# is executed.
-#
-# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
-######################################################################
-use strict;
-use File::Basename;
-use Cwd;
-
-my $file = $ARGV[0];
-die "No file provided!" if !defined $file;
-
-my ($cflags, $target, $type, $line);
-
-open(F, "<$file") || die "Couldn't open file $file";
-my @data = <F>;
-close(F);
-
-while (my $text = shift @data) {
- my $ate_next;
- do {
- $ate_next = 0;
- $line++;
- chomp $text;
- chop $text if ($text =~ /\r$/);
- if ($text =~ /\\$/) {
- $text =~ s/\\$//;
- $text .= shift @data;
- $ate_next = 1;
- }
- } while($ate_next);
-
- if($text =~ / -c /) {
- # compilation
- handleCompileLine($text, $line);
-
- } elsif ($text =~ / -o /) {
- # linking executable
- handleLinkLine($text, $line);
-
- } elsif ($text =~ /\.o / && $text =~ /\.a /) {
- # libifying
- handleLibLine($text, $line);
-
-# } elsif ($text =~ /^cp /) {
-# # copy file around
-#
-# } elsif ($text =~ /^rm -f /) {
-# # shell command
-#
-# } elsif ($text =~ /^make[ \[]/) {
-# # make output
-#
-# } elsif ($text =~ /^echo /) {
-# # echo to file
-#
-# } elsif ($text =~ /^if /) {
-# # shell conditional
-#
-# } elsif ($text =~ /^tclsh /) {
-# # translation stuff
-#
-# } elsif ($text =~ /^umask /) {
-# # handling boilerplates
-#
-# } elsif ($text =~ /\$\(\:\)/) {
-# # ignore
-#
-# } elsif ($text =~ /^FLAGS=/) {
-# # flags check for dependencies
-#
-# } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
-# # perl commands for copying files
-#
-# } elsif ($text =~ /generate-cmdlist\.sh/) {
-# # command for generating list of commands
-#
-# } elsif ($text =~ /^test / && $text =~ /|| rm -f /) {
-# # commands removing executables, if they exist
-#
-# } elsif ($text =~ /new locations or Tcl/) {
-# # command for detecting Tcl/Tk changes
-#
-# } elsif ($text =~ /mkdir -p/) {
-# # command creating path
-#
-# } elsif ($text =~ /: no custom templates yet/) {
-# # whatever
-
- } else {
-# print "Unhandled (line: $line): $text\n";
- }
-}
-close(F);
-
-# use Data::Dumper;
-# print "Parsed build structure:\n";
-# print Dumper(%build_structure);
-
-# -------------------------------------------------------------------
-# Functions under here
-# -------------------------------------------------------------------
-my (%build_structure, @defines, @incpaths, @cflags, @sources);
-
-sub clearCompileStep
-{
- @defines = ();
- @incpaths = ();
- @cflags = ();
- @sources = ();
-}
-
-sub removeDuplicates
-{
- my (%dupHash, $entry);
- %dupHash = map { $_, 1 } @defines;
- @defines = keys %dupHash;
-
- %dupHash = map { $_, 1 } @incpaths;
- @incpaths = keys %dupHash;
-
- %dupHash = map { $_, 1 } @cflags;
- @cflags = keys %dupHash;
-
- %dupHash = map { $_, 1 } @sources;
- @sources = keys %dupHash;
-}
-
-sub handleCompileLine
-{
- my ($line, $lineno) = @_;
- my @parts = split(' ', $line);
- shift(@parts); # ignore cmd
- while (my $part = shift @parts) {
- if ("$part" eq "-o") {
- # ignore object file
- shift @parts;
- } elsif ("$part" eq "-c") {
- # ignore compile flag
- } elsif ("$part" eq "-c") {
- } elsif ($part =~ /^.?-I/) {
- push(@incpaths, $part);
- } elsif ($part =~ /^.?-D/) {
- push(@defines, $part);
- } elsif ($part =~ /^-/) {
- push(@cflags, $part);
- } elsif ($part =~ /\.(c|cc|cpp)$/) {
- push(@sources, $part);
- } else {
- die "Unhandled compiler option @ line $lineno: $part";
- }
- }
- #print "Sources: @sources\nCFlags: @cflags\nDefine: @defines\nIncpat: @incpaths\n";
- #exit(1);
-}
-
-sub handleLibLine
-{
- my ($line, $lineno) = @_;
- my (@objfiles, @lflags, $libout, $part);
- # kill cmd and rm 'prefix'
- $line =~ s/^rm -f .* && .* rcs //;
- my @parts = split(' ', $line);
- while ($part = shift @parts) {
- if ($part =~ /^-/) {
- push(@lflags, $part);
- } elsif ($part =~ /\.(o|obj)$/) {
- push(@objfiles, $part);
- } elsif ($part =~ /\.(a|lib)$/) {
- $libout = $part;
- } else {
- die "Unhandled lib option @ line $lineno: $part";
- }
- }
- #print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
- #exit(1);
- removeDuplicates();
- push(@{$build_structure{"LIBS"}}, $libout);
- @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
- "_OBJECTS");
- @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
- @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
- @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
- @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
- @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
- clearCompileStep();
-}
-
-sub handleLinkLine
-{
- my ($line, $lineno) = @_;
- my (@objfiles, @lflags, @libs, $appout, $part);
- my @parts = split(' ', $line);
- shift(@parts); # ignore cmd
- while ($part = shift @parts) {
- if ($part =~ /^-[GRIDO]/) {
- # eat compiler flags
- } elsif ("$part" eq "-o") {
- $appout = shift @parts;
- } elsif ($part =~ /^-/) {
- push(@lflags, $part);
- } elsif ($part =~ /\.(a|lib)$/) {
- push(@libs, $part);
- } elsif ($part =~ /\.(o|obj)$/) {
- push(@objfiles, $part);
- } else {
- die "Unhandled lib option @ line $lineno: $part";
- }
- }
- #print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n";
- #exit(1);
- removeDuplicates();
- push(@{$build_structure{"APPS"}}, $appout);
- @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
- "_SOURCES", "_OBJECTS", "_LIBS");
- @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
- @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
- @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
- @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
- @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
- @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
- @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
- clearCompileStep();
-}
diff --git a/convert.c b/convert.c
index 8783e17941..b5f7cf6306 100644
--- a/convert.c
+++ b/convert.c
@@ -8,7 +8,7 @@
#include "copy.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "object-file.h"
#include "attr.h"
#include "run-command.h"
#include "quote.h"
diff --git a/diffcore-rename.c b/diffcore-rename.c
index 179731462b..7723bc3334 100644
--- a/diffcore-rename.c
+++ b/diffcore-rename.c
@@ -8,7 +8,7 @@
#include "git-compat-util.h"
#include "diff.h"
#include "diffcore.h"
-#include "object-store.h"
+#include "object-file.h"
#include "hashmap.h"
#include "mem-pool.h"
#include "oid-array.h"
diff --git a/dir.c b/dir.c
index 5c4675b4ac..a374972b62 100644
--- a/dir.c
+++ b/dir.c
@@ -17,7 +17,7 @@
#include "environment.h"
#include "gettext.h"
#include "name-hash.h"
-#include "object-store.h"
+#include "object-file.h"
#include "path.h"
#include "refs.h"
#include "repository.h"
@@ -518,7 +518,8 @@ static int do_match_pathspec(struct index_state *istate,
( exclude && !(ps->items[i].magic & PATHSPEC_EXCLUDE)))
continue;
- if (seen && seen[i] == MATCHED_EXACTLY)
+ if (seen && seen[i] == MATCHED_EXACTLY &&
+ ps->items[i].nowildcard_len == ps->items[i].len)
continue;
/*
* Make exclude patterns optional and never report
diff --git a/fetch-pack.c b/fetch-pack.c
index 210dc30d50..fa4231fee7 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -769,9 +769,7 @@ static void mark_complete_and_common_ref(struct fetch_negotiator *negotiator,
if (!commit) {
struct object *o;
- if (!repo_has_object_file_with_flags(the_repository, &ref->old_oid,
- OBJECT_INFO_QUICK |
- OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (!has_object(the_repository, &ref->old_oid, 0))
continue;
o = parse_object(the_repository, &ref->old_oid);
if (!o || o->type != OBJ_COMMIT)
@@ -1985,7 +1983,8 @@ static void update_shallow(struct fetch_pack_args *args,
struct oid_array extra = OID_ARRAY_INIT;
struct object_id *oid = si->shallow->oid;
for (i = 0; i < si->shallow->nr; i++)
- if (repo_has_object_file(the_repository, &oid[i]))
+ if (has_object(the_repository, &oid[i],
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
oid_array_append(&extra, &oid[i]);
if (extra.nr) {
setup_alternate_shallow(&shallow_lock,
diff --git a/git-send-email.perl b/git-send-email.perl
index 1f613fa979..659e6c588b 100755
--- a/git-send-email.perl
+++ b/git-send-email.perl
@@ -41,6 +41,8 @@ git send-email --translate-aliases
--subject <str> * Email "Subject:"
--reply-to <str> * Email "Reply-To:"
--in-reply-to <str> * Email "In-Reply-To:"
+ --[no-]outlook-id-fix * The SMTP host is an Outlook server that munges the
+ Message-ID. Retrieve it from the server.
--[no-]xmailer * Add "X-Mailer:" header (default).
--[no-]annotate * Review each patch that will be sent in an editor.
--compose * Open an editor for introduction.
@@ -68,7 +70,7 @@ git send-email --translate-aliases
--smtp-auth <str> * Space-separated list of allowed AUTH mechanisms, or
"none" to disable authentication.
This setting forces to use one of the listed mechanisms.
- --no-smtp-auth Disable SMTP authentication. Shorthand for
+ --no-smtp-auth * Disable SMTP authentication. Shorthand for
`--smtp-auth=none`
--smtp-debug <0|1> * Disable, enable Net::SMTP debug.
@@ -290,6 +292,7 @@ my $validate = 1;
my $mailmap = 0;
my $target_xfer_encoding = 'auto';
my $forbid_sendmail_variables = 1;
+my $outlook_id_fix = 'auto';
my %config_bool_settings = (
"thread" => \$thread,
@@ -305,6 +308,7 @@ my %config_bool_settings = (
"xmailer" => \$use_xmailer,
"forbidsendmailvariables" => \$forbid_sendmail_variables,
"mailmap" => \$mailmap,
+ "outlookidfix" => \$outlook_id_fix,
);
my %config_settings = (
@@ -551,6 +555,7 @@ my %options = (
"relogin-delay=i" => \$relogin_delay,
"git-completion-helper" => \$git_completion_helper,
"v=s" => \$reroll_count,
+ "outlook-id-fix!" => \$outlook_id_fix,
);
$rc = GetOptions(%options);
@@ -1354,7 +1359,9 @@ sub process_address_list {
sub valid_fqdn {
my $domain = shift;
- return defined $domain && !($^O eq 'darwin' && $domain =~ /\.local$/) && $domain =~ /\./;
+ my $subdomain = '(?!-)[A-Za-z0-9-]{1,63}(?<!-)';
+ return defined $domain && !($^O eq 'darwin' && $domain =~ /\.local$/)
+ && $domain =~ /^$subdomain(?:\.$subdomain)*$/;
}
sub maildomain_net {
@@ -1386,8 +1393,22 @@ sub maildomain_mta {
return $maildomain;
}
+sub maildomain_hostname_command {
+ my $maildomain;
+
+ if ($^O eq 'linux' || $^O eq 'darwin') {
+ my $domain = `(hostname -f) 2>/dev/null`;
+ if (!$?) {
+ chomp($domain);
+ $maildomain = $domain if valid_fqdn($domain);
+ }
+ }
+ return $maildomain;
+}
+
sub maildomain {
- return maildomain_net() || maildomain_mta() || 'localhost.localdomain';
+ return maildomain_net() || maildomain_mta() ||
+ maildomain_hostname_command || 'localhost.localdomain';
}
sub smtp_host_string {
@@ -1574,6 +1595,16 @@ Message-ID: $message_id
return ($recipients_ref, $to, $date, $gitversion, $cc, $ccline, $header);
}
+sub is_outlook {
+ my ($host) = @_;
+ if ($outlook_id_fix eq 'auto') {
+ $outlook_id_fix =
+ ($host eq 'smtp.office365.com' ||
+ $host eq 'smtp-mail.outlook.com') ? 1 : 0;
+ }
+ return $outlook_id_fix;
+}
+
# Prepares the email, then asks the user what to do.
#
# If the user chooses to send the email, it's sent and 1 is returned.
@@ -1737,6 +1768,22 @@ EOF
$smtp->datasend("$line") or die $smtp->message;
}
$smtp->dataend() or die $smtp->message;
+
+ # Outlook discards the Message-ID header we set while sending the email
+ # and generates a new random Message-ID. So in order to avoid breaking
+ # threads, we simply retrieve the Message-ID from the server response
+ # and assign it to the $message_id variable, which will then be
+ # assigned to $in_reply_to by the caller when the next message is sent
+ # as a response to this message.
+ if (is_outlook($smtp_server)) {
+ if ($smtp->message =~ /<([^>]+)>/) {
+ $message_id = "<$1>";
+ printf __("Outlook reassigned Message-ID to: %s\n"), $message_id;
+ } else {
+ warn __("Warning: Could not retrieve Message-ID from server response.\n");
+ }
+ }
+
$smtp->code =~ /250|200/ or die sprintf(__("Failed to send %s\n"), $subject).$smtp->message;
}
if ($quiet) {
diff --git a/http-push.c b/http-push.c
index 32e37565f4..f9e67cabd4 100644
--- a/http-push.c
+++ b/http-push.c
@@ -1446,7 +1446,9 @@ static void one_remote_ref(const char *refname)
* Fetch a copy of the object if it doesn't exist locally - it
* may be required for updating server info later.
*/
- if (repo->can_update_info_refs && !repo_has_object_file(the_repository, &ref->old_oid)) {
+ if (repo->can_update_info_refs &&
+ !has_object(the_repository, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
obj = lookup_unknown_object(the_repository, &ref->old_oid);
fprintf(stderr, " fetch %s for %s\n",
oid_to_hex(&ref->old_oid), refname);
@@ -1651,14 +1653,14 @@ static int delete_remote_branch(const char *pattern, int force)
return error("Remote HEAD symrefs too deep");
if (is_null_oid(&head_oid))
return error("Unable to resolve remote HEAD");
- if (!repo_has_object_file(the_repository, &head_oid))
+ if (!has_object(the_repository, &head_oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return error("Remote HEAD resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", oid_to_hex(&head_oid));
/* Remote branch must resolve to a known object */
if (is_null_oid(&remote_ref->old_oid))
return error("Unable to resolve remote branch %s",
remote_ref->name);
- if (!repo_has_object_file(the_repository, &remote_ref->old_oid))
+ if (!has_object(the_repository, &remote_ref->old_oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return error("Remote branch %s resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", remote_ref->name, oid_to_hex(&remote_ref->old_oid));
/* Remote branch must be an ancestor of remote HEAD */
@@ -1879,7 +1881,8 @@ int cmd_main(int argc, const char **argv)
if (!force_all &&
!is_null_oid(&ref->old_oid) &&
!ref->force) {
- if (!repo_has_object_file(the_repository, &ref->old_oid) ||
+ if (!has_object(the_repository, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) ||
!ref_newer(&ref->peer_ref->new_oid,
&ref->old_oid)) {
/*
diff --git a/http-walker.c b/http-walker.c
index 882cae19c2..463f7b119a 100644
--- a/http-walker.c
+++ b/http-walker.c
@@ -9,6 +9,7 @@
#include "list.h"
#include "transport.h"
#include "packfile.h"
+#include "object-file.h"
#include "object-store.h"
struct alt_base {
@@ -137,7 +138,8 @@ static int fill_active_slot(void *data UNUSED)
list_for_each_safe(pos, tmp, head) {
obj_req = list_entry(pos, struct object_request, node);
if (obj_req->state == WAITING) {
- if (repo_has_object_file(the_repository, &obj_req->oid))
+ if (has_object(the_repository, &obj_req->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
obj_req->state = COMPLETE;
else {
start_object_request(obj_req);
@@ -495,7 +497,8 @@ static int fetch_object(struct walker *walker, const struct object_id *oid)
if (!obj_req)
return error("Couldn't find request for %s in the queue", hex);
- if (repo_has_object_file(the_repository, &obj_req->oid)) {
+ if (has_object(the_repository, &obj_req->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
if (obj_req->req)
abort_http_object_request(&obj_req->req);
abort_object_request(obj_req);
@@ -540,7 +543,7 @@ static int fetch_object(struct walker *walker, const struct object_id *oid)
ret = error("File %s has bad hash", hex);
} else if (req->rename < 0) {
struct strbuf buf = STRBUF_INIT;
- loose_object_path(the_repository, &buf, &req->oid);
+ odb_loose_path(the_repository->objects->odb, &buf, &req->oid);
ret = error("unable to write sha1 filename %s", buf.buf);
strbuf_release(&buf);
}
diff --git a/http.c b/http.c
index 0c41138042..3c029cf894 100644
--- a/http.c
+++ b/http.c
@@ -2662,7 +2662,7 @@ struct http_object_request *new_http_object_request(const char *base_url,
oidcpy(&freq->oid, oid);
freq->localfile = -1;
- loose_object_path(the_repository, &filename, oid);
+ odb_loose_path(the_repository->objects->odb, &filename, oid);
strbuf_addf(&freq->tmpfile, "%s.temp", filename.buf);
strbuf_addf(&prevfile, "%s.prev", filename.buf);
@@ -2814,7 +2814,7 @@ int finish_http_object_request(struct http_object_request *freq)
unlink_or_warn(freq->tmpfile.buf);
return -1;
}
- loose_object_path(the_repository, &filename, &freq->oid);
+ odb_loose_path(the_repository->objects->odb, &filename, &freq->oid);
freq->rename = finalize_object_file(freq->tmpfile.buf, filename.buf);
strbuf_release(&filename);
diff --git a/json-writer.c b/json-writer.c
index 8c5187e9fd..34577dc25f 100644
--- a/json-writer.c
+++ b/json-writer.c
@@ -268,10 +268,6 @@ static void append_sub_jw(struct json_writer *jw,
strbuf_addbuf(&jw->json, &value->json);
}
-/*
- * Append existing (properly terminated) JSON sub-data (object or array)
- * as-is onto the given JSON data.
- */
void jw_object_sub_jw(struct json_writer *jw, const char *key,
const struct json_writer *value)
{
diff --git a/json-writer.h b/json-writer.h
index 04413bd1af..8f845d4d29 100644
--- a/json-writer.h
+++ b/json-writer.h
@@ -28,6 +28,34 @@
* object/array) -or- by building them inline in one pass. This is a
* personal style and/or data shape choice.
*
+ * USAGE:
+ * ======
+ *
+ * - Initialize the json_writer with jw_init.
+ *
+ * - Open an object as the main data structure with jw_object_begin.
+ * Append a key-value pair to it using the jw_object_<type> functions.
+ * Conclude with jw_end.
+ *
+ * - Alternatively, open an array as the main data structure with
+ * jw_array_begin. Append a value to it using the jw_array_<type>
+ * functions. Conclude with jw_end.
+ *
+ * - Append a new, unterminated array or object to the current
+ * object using the jw_object_inline_begin_{array, object} functions.
+ * Similarly, append a new, unterminated array or object to
+ * the current array using the jw_array_inline_begin_{array, object}
+ * functions.
+ *
+ * - Append other json_writer as a value to the current array or object
+ * using the jw_{array, object}_sub_jw functions.
+ *
+ * - Extend the current array with an null-terminated array of strings
+ * by using jw_array_argv or with a fixed number of elements of a
+ * array of string by using jw_array_argc_argv.
+ *
+ * - Release the json_writer after using it by calling jw_release.
+ *
* See t/helper/test-json-writer.c for various usage examples.
*
* LIMITATIONS:
@@ -69,42 +97,185 @@ struct json_writer
.open_stack = STRBUF_INIT, \
}
+/*
+ * Initialize a json_writer with empty values.
+ */
void jw_init(struct json_writer *jw);
+
+/*
+ * Release the internal buffers of a json_writer.
+ */
void jw_release(struct json_writer *jw);
+/*
+ * Begin the json_writer using an object as the top-level data structure. If
+ * pretty is set to 1, the result will be a human-readable and indented JSON,
+ * and if it is set to 0 the result will be minified single-line JSON.
+ */
void jw_object_begin(struct json_writer *jw, int pretty);
+
+/*
+ * Begin the json_writer using an array as the top-level data structure. If
+ * pretty is set to 1, the result will be a human-readable and indented JSON,
+ * and if it is set to 0 the result will be minified single-line JSON.
+ */
void jw_array_begin(struct json_writer *jw, int pretty);
+/*
+ * Append a string field to the current object of the json_writer, given its key
+ * and its value. Trigger a BUG when not in an object.
+ */
void jw_object_string(struct json_writer *jw, const char *key,
const char *value);
+
+/*
+ * Append an int field to the current object of the json_writer, given its key
+ * and its value. Trigger a BUG when not in an object.
+ */
void jw_object_intmax(struct json_writer *jw, const char *key, intmax_t value);
+
+/*
+ * Append a double field to the current object of the json_writer, given its key
+ * and its value. The precision parameter defines the number of significant
+ * digits, where -1 can be used for maximum precision. Trigger a BUG when not in
+ * an object.
+ */
void jw_object_double(struct json_writer *jw, const char *key, int precision,
double value);
+
+/*
+ * Append a boolean field set to true to the current object of the json_writer,
+ * given its key. Trigger a BUG when not in an object.
+ */
void jw_object_true(struct json_writer *jw, const char *key);
+
+/*
+ * Append a boolean field set to false to the current object of the json_writer,
+ * given its key. Trigger a BUG when not in an object.
+ */
void jw_object_false(struct json_writer *jw, const char *key);
+
+/*
+ * Append a boolean field to the current object of the json_writer, given its
+ * key and its value. Trigger a BUG when not in an object.
+ */
void jw_object_bool(struct json_writer *jw, const char *key, int value);
+
+/*
+ * Append a null field to the current object of the json_writer, given its key.
+ * Trigger a BUG when not in an object.
+ */
void jw_object_null(struct json_writer *jw, const char *key);
+
+/*
+ * Append a field to the current object of the json_writer, given its key and
+ * another json_writer that represents its content. Trigger a BUG when not in
+ * an object.
+ */
void jw_object_sub_jw(struct json_writer *jw, const char *key,
const struct json_writer *value);
+/*
+ * Start an object as the value of a field in the current object of the
+ * json_writer. Trigger a BUG when not in an object.
+ */
void jw_object_inline_begin_object(struct json_writer *jw, const char *key);
+
+/*
+ * Start an array as the value of a field in the current object of the
+ * json_writer. Trigger a BUG when not in an object.
+ */
void jw_object_inline_begin_array(struct json_writer *jw, const char *key);
+/*
+ * Append a string value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_string(struct json_writer *jw, const char *value);
+
+/*
+ * Append an int value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_intmax(struct json_writer *jw, intmax_t value);
+
+/*
+ * Append a double value to the current array of the json_writer. The precision
+ * parameter defines the number of significant digits, where -1 can be used for
+ * maximum precision. Trigger a BUG when not in an array.
+ */
void jw_array_double(struct json_writer *jw, int precision, double value);
+
+/*
+ * Append a true value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_true(struct json_writer *jw);
+
+/*
+ * Append a false value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_false(struct json_writer *jw);
+
+/*
+ * Append a boolean value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_bool(struct json_writer *jw, int value);
+
+/*
+ * Append a null value to the current array of the json_writer. Trigger a BUG
+ * when not in an array.
+ */
void jw_array_null(struct json_writer *jw);
+
+/*
+ * Append a json_writer as a value to the current array of the
+ * json_writer. Trigger a BUG when not in an array.
+ */
void jw_array_sub_jw(struct json_writer *jw, const struct json_writer *value);
+
+/*
+ * Append the first argc values from the argv array of strings to the current
+ * array of the json_writer. Trigger a BUG when not in an array.
+ *
+ * This function does not provide safety for cases where the array has less than
+ * argc values.
+ */
void jw_array_argc_argv(struct json_writer *jw, int argc, const char **argv);
+
+/*
+ * Append a null-terminated array of strings to the current array of the
+ * json_writer. Trigger a BUG when not in an array.
+ */
void jw_array_argv(struct json_writer *jw, const char **argv);
+/*
+ * Start an object as a value in the current array of the json_writer. Trigger a
+ * BUG when not in an array.
+ */
void jw_array_inline_begin_object(struct json_writer *jw);
+
+/*
+ * Start an array as a value in the current array. Trigger a BUG when not in an
+ * array.
+ */
void jw_array_inline_begin_array(struct json_writer *jw);
+/*
+ * Return whether the json_writer is terminated. In other words, if the all the
+ * objects and arrays are already closed.
+ */
int jw_is_terminated(const struct json_writer *jw);
+
+/*
+ * Terminates the current object or array of the json_writer. In other words,
+ * append a ] if the current array is not closed or } if the current object
+ * is not closed.
+ *
+ * Abort the execution if there's no object or array that can be terminated.
+ */
void jw_end(struct json_writer *jw);
#endif /* JSON_WRITER_H */
diff --git a/list-objects-filter.c b/list-objects-filter.c
index 7765761b3c..78b397bc19 100644
--- a/list-objects-filter.c
+++ b/list-objects-filter.c
@@ -244,7 +244,7 @@ static void filter_trees_free(void *filter_data) {
struct filter_trees_depth_data *d = filter_data;
if (!d)
return;
- oidmap_free(&d->seen_at_depth, 1);
+ oidmap_clear(&d->seen_at_depth, 1);
free(d);
}
diff --git a/list-objects.c b/list-objects.c
index 1e5512e131..597114281f 100644
--- a/list-objects.c
+++ b/list-objects.c
@@ -74,7 +74,8 @@ static void process_blob(struct traversal_context *ctx,
* of missing objects.
*/
if (ctx->revs->exclude_promisor_objects &&
- !repo_has_object_file(the_repository, &obj->oid) &&
+ !has_object(the_repository, &obj->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
is_promisor_object(ctx->revs->repo, &obj->oid))
return;
diff --git a/log-tree.c b/log-tree.c
index a4d4ab59ca..1d05dc1c70 100644
--- a/log-tree.c
+++ b/log-tree.c
@@ -9,7 +9,7 @@
#include "environment.h"
#include "hex.h"
#include "object-name.h"
-#include "object-store.h"
+#include "object-file.h"
#include "repository.h"
#include "tmp-objdir.h"
#include "commit.h"
diff --git a/mailinfo.c b/mailinfo.c
index 7b001fa5db..ee4597da6b 100644
--- a/mailinfo.c
+++ b/mailinfo.c
@@ -381,12 +381,12 @@ static int is_format_patch_separator(const char *line, int len)
return !memcmp(SAMPLE + (cp - line), cp, strlen(SAMPLE) - (cp - line));
}
-static struct strbuf *decode_q_segment(const struct strbuf *q_seg, int rfc2047)
+static int decode_q_segment(struct strbuf *out, const struct strbuf *q_seg,
+ int rfc2047)
{
const char *in = q_seg->buf;
int c;
- struct strbuf *out = xmalloc(sizeof(struct strbuf));
- strbuf_init(out, q_seg->len);
+ strbuf_grow(out, q_seg->len);
while ((c = *in++) != 0) {
if (c == '=') {
@@ -405,16 +405,15 @@ static struct strbuf *decode_q_segment(const struct strbuf *q_seg, int rfc2047)
c = 0x20;
strbuf_addch(out, c);
}
- return out;
+ return 0;
}
-static struct strbuf *decode_b_segment(const struct strbuf *b_seg)
+static int decode_b_segment(struct strbuf *out, const struct strbuf *b_seg)
{
/* Decode in..ep, possibly in-place to ot */
int c, pos = 0, acc = 0;
const char *in = b_seg->buf;
- struct strbuf *out = xmalloc(sizeof(struct strbuf));
- strbuf_init(out, b_seg->len);
+ strbuf_grow(out, b_seg->len);
while ((c = *in++) != 0) {
if (c == '+')
@@ -447,7 +446,7 @@ static struct strbuf *decode_b_segment(const struct strbuf *b_seg)
break;
}
}
- return out;
+ return 0;
}
static int convert_to_utf8(struct mailinfo *mi,
@@ -475,7 +474,7 @@ static int convert_to_utf8(struct mailinfo *mi,
static void decode_header(struct mailinfo *mi, struct strbuf *it)
{
char *in, *ep, *cp;
- struct strbuf outbuf = STRBUF_INIT, *dec;
+ struct strbuf outbuf = STRBUF_INIT, dec = STRBUF_INIT;
struct strbuf charset_q = STRBUF_INIT, piecebuf = STRBUF_INIT;
int found_error = 1; /* pessimism */
@@ -530,18 +529,19 @@ static void decode_header(struct mailinfo *mi, struct strbuf *it)
default:
goto release_return;
case 'b':
- dec = decode_b_segment(&piecebuf);
+ if ((found_error = decode_b_segment(&dec, &piecebuf)))
+ goto release_return;
break;
case 'q':
- dec = decode_q_segment(&piecebuf, 1);
+ if ((found_error = decode_q_segment(&dec, &piecebuf, 1)))
+ goto release_return;
break;
}
- if (convert_to_utf8(mi, dec, charset_q.buf))
+ if (convert_to_utf8(mi, &dec, charset_q.buf))
goto release_return;
- strbuf_addbuf(&outbuf, dec);
- strbuf_release(dec);
- free(dec);
+ strbuf_addbuf(&outbuf, &dec);
+ strbuf_release(&dec);
in = ep + 2;
}
strbuf_addstr(&outbuf, in);
@@ -552,6 +552,7 @@ release_return:
strbuf_release(&outbuf);
strbuf_release(&charset_q);
strbuf_release(&piecebuf);
+ strbuf_release(&dec);
if (found_error)
mi->input_error = -1;
@@ -634,23 +635,22 @@ static int is_inbody_header(const struct mailinfo *mi,
static void decode_transfer_encoding(struct mailinfo *mi, struct strbuf *line)
{
- struct strbuf *ret;
+ struct strbuf ret = STRBUF_INIT;
switch (mi->transfer_encoding) {
case TE_QP:
- ret = decode_q_segment(line, 0);
+ decode_q_segment(&ret, line, 0);
break;
case TE_BASE64:
- ret = decode_b_segment(line);
+ decode_b_segment(&ret, line);
break;
case TE_DONTCARE:
default:
return;
}
strbuf_reset(line);
- strbuf_addbuf(line, ret);
- strbuf_release(ret);
- free(ret);
+ strbuf_addbuf(line, &ret);
+ strbuf_release(&ret);
}
static inline int patchbreak(const struct strbuf *line)
diff --git a/meson.build b/meson.build
index 270ce933d0..a1476e5b32 100644
--- a/meson.build
+++ b/meson.build
@@ -243,7 +243,11 @@ shell = find_program('sh', dirs: program_path, native: true)
tar = find_program('tar', dirs: program_path, native: true)
time = find_program('time', dirs: program_path, required: get_option('benchmarks'))
-target_shell = find_program('sh', dirs: program_path, native: false)
+# Detect the target shell that is used by Git at runtime. Note that we prefer
+# "/bin/sh" over a PATH-based lookup, which provides a working shell on most
+# supported systems. This path is also the default shell path used by our
+# Makefile. This lookup can be overridden via `program_path`.
+target_shell = find_program('sh', dirs: program_path + [ '/bin' ], native: false)
# Sanity-check that programs required for the build exist.
foreach tool : ['cat', 'cut', 'grep', 'sort', 'tr', 'uname']
@@ -1900,14 +1904,19 @@ if perl_features_enabled
perl_header_template = 'perl/header_templates/runtime_prefix.template.pl'
endif
+ perllibdir = get_option('perllibdir')
+ if perllibdir == ''
+ perllibdir = get_option('datadir') / 'perl5'
+ endif
+
perl_header = configure_file(
input: perl_header_template,
output: 'GIT-PERL-HEADER',
configuration: {
'GITEXECDIR_REL': get_option('libexecdir') / 'git-core',
- 'PERLLIBDIR_REL': get_option('datadir') / 'perl5',
+ 'PERLLIBDIR_REL': perllibdir,
'LOCALEDIR_REL': get_option('datadir') / 'locale',
- 'INSTLIBDIR': get_option('datadir') / 'perl5',
+ 'INSTLIBDIR': perllibdir,
'PATHSEP': pathsep,
},
)
@@ -2174,3 +2183,9 @@ summary({
'sha256': sha256_backend,
'zlib': zlib_backend,
}, section: 'Backends')
+
+summary({
+ 'perl': target_perl,
+ 'python': target_python,
+ 'shell': target_shell,
+}, section: 'Runtime executable paths')
diff --git a/meson_options.txt b/meson_options.txt
index 8547c0eb47..54c63614d4 100644
--- a/meson_options.txt
+++ b/meson_options.txt
@@ -1,3 +1,7 @@
+# Configuration for Git installation
+option('perllibdir', type: 'string', value: '',
+ description: 'Directory to install perl lib to. Defaults to <datadir>/perl5')
+
# Configuration for how Git behaves at runtime.
option('default_pager', type: 'string', value: 'less',
description: 'Fall-back pager.')
diff --git a/notes.c b/notes.c
index d9645c4b5d..0a128f1de9 100644
--- a/notes.c
+++ b/notes.c
@@ -794,7 +794,8 @@ static int prune_notes_helper(const struct object_id *object_oid,
struct note_delete_list **l = (struct note_delete_list **) cb_data;
struct note_delete_list *n;
- if (repo_has_object_file(the_repository, object_oid))
+ if (has_object(the_repository, object_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0; /* nothing to do for this note */
/* failed to find object => prune this note */
diff --git a/object-file.c b/object-file.c
index 9cc3a24a40..1ac04c2891 100644
--- a/object-file.c
+++ b/object-file.c
@@ -130,12 +130,6 @@ int has_loose_object(const struct object_id *oid)
return check_and_freshen(oid, 0);
}
-static int format_object_header_literally(char *str, size_t size,
- const char *type, size_t objsize)
-{
- return xsnprintf(str, size, "%s %"PRIuMAX, type, (uintmax_t)objsize) + 1;
-}
-
int format_object_header(char *str, size_t size, enum object_type type,
size_t objsize)
{
@@ -144,7 +138,7 @@ int format_object_header(char *str, size_t size, enum object_type type,
if (!name)
BUG("could not get a type name for 'enum object_type' value %d", type);
- return format_object_header_literally(str, size, name, objsize);
+ return xsnprintf(str, size, "%s %"PRIuMAX, name, (uintmax_t)objsize) + 1;
}
int check_object_signature(struct repository *r, const struct object_id *oid,
@@ -299,8 +293,7 @@ enum unpack_loose_header_result unpack_loose_header(git_zstream *stream,
unsigned char *map,
unsigned long mapsize,
void *buffer,
- unsigned long bufsiz,
- struct strbuf *header)
+ unsigned long bufsiz)
{
int status;
@@ -325,32 +318,9 @@ enum unpack_loose_header_result unpack_loose_header(git_zstream *stream,
return ULHR_OK;
/*
- * We have a header longer than MAX_HEADER_LEN. The "header"
- * here is only non-NULL when we run "cat-file
- * --allow-unknown-type".
+ * We have a header longer than MAX_HEADER_LEN.
*/
- if (!header)
- return ULHR_TOO_LONG;
-
- /*
- * buffer[0..bufsiz] was not large enough. Copy the partial
- * result out to header, and then append the result of further
- * reading the stream.
- */
- strbuf_add(header, buffer, stream->next_out - (unsigned char *)buffer);
-
- do {
- stream->next_out = buffer;
- stream->avail_out = bufsiz;
-
- obj_read_unlock();
- status = git_inflate(stream, 0);
- obj_read_lock();
- strbuf_add(header, buffer, stream->next_out - (unsigned char *)buffer);
- if (memchr(buffer, '\0', stream->next_out - (unsigned char *)buffer))
- return 0;
- } while (status == Z_OK);
- return ULHR_BAD;
+ return ULHR_TOO_LONG;
}
static void *unpack_loose_rest(git_zstream *stream,
@@ -427,8 +397,6 @@ int parse_loose_header(const char *hdr, struct object_info *oi)
}
type = type_from_string_gently(type_buf, type_len, 1);
- if (oi->type_name)
- strbuf_add(oi->type_name, type_buf, type_len);
if (oi->typep)
*oi->typep = type;
@@ -476,10 +444,8 @@ int loose_object_info(struct repository *r,
void *map;
git_zstream stream;
char hdr[MAX_HEADER_LEN];
- struct strbuf hdrbuf = STRBUF_INIT;
unsigned long size_scratch;
enum object_type type_scratch;
- int allow_unknown = flags & OBJECT_INFO_ALLOW_UNKNOWN_TYPE;
if (oi->delta_base_oid)
oidclr(oi->delta_base_oid, the_repository->hash_algo);
@@ -492,7 +458,7 @@ int loose_object_info(struct repository *r,
* return value implicitly indicates whether the
* object even exists.
*/
- if (!oi->typep && !oi->type_name && !oi->sizep && !oi->contentp) {
+ if (!oi->typep && !oi->sizep && !oi->contentp) {
struct stat st;
if (!oi->disk_sizep && (flags & OBJECT_INFO_QUICK))
return quick_has_loose(r, oid) ? 0 : -1;
@@ -521,18 +487,15 @@ int loose_object_info(struct repository *r,
if (oi->disk_sizep)
*oi->disk_sizep = mapsize;
- switch (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr),
- allow_unknown ? &hdrbuf : NULL)) {
+ switch (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr))) {
case ULHR_OK:
- if (parse_loose_header(hdrbuf.len ? hdrbuf.buf : hdr, oi) < 0)
+ if (parse_loose_header(hdr, oi) < 0)
status = error(_("unable to parse %s header"), oid_to_hex(oid));
- else if (!allow_unknown && *oi->typep < 0)
+ else if (*oi->typep < 0)
die(_("invalid object type"));
if (!oi->contentp)
break;
- if (hdrbuf.len)
- BUG("unpacking content with unknown types not yet supported");
*oi->contentp = unpack_loose_rest(&stream, hdr, *oi->sizep, oid);
if (*oi->contentp)
goto cleanup;
@@ -558,7 +521,6 @@ cleanup:
munmap(map, mapsize);
if (oi->sizep == &size_scratch)
oi->sizep = NULL;
- strbuf_release(&hdrbuf);
if (oi->typep == &type_scratch)
oi->typep = NULL;
oi->whence = OI_LOOSE;
@@ -590,17 +552,6 @@ static void write_object_file_prepare(const struct git_hash_algo *algo,
hash_object_body(algo, &c, buf, len, oid, hdr, hdrlen);
}
-static void write_object_file_prepare_literally(const struct git_hash_algo *algo,
- const void *buf, unsigned long len,
- const char *type, struct object_id *oid,
- char *hdr, int *hdrlen)
-{
- struct git_hash_ctx c;
-
- *hdrlen = format_object_header_literally(hdr, *hdrlen, type, len);
- hash_object_body(algo, &c, buf, len, oid, hdr, hdrlen);
-}
-
#define CHECK_COLLISION_DEST_VANISHED -2
static int check_collision(const char *source, const char *dest)
@@ -730,21 +681,14 @@ out:
return 0;
}
-static void hash_object_file_literally(const struct git_hash_algo *algo,
- const void *buf, unsigned long len,
- const char *type, struct object_id *oid)
-{
- char hdr[MAX_HEADER_LEN];
- int hdrlen = sizeof(hdr);
-
- write_object_file_prepare_literally(algo, buf, len, type, oid, hdr, &hdrlen);
-}
-
void hash_object_file(const struct git_hash_algo *algo, const void *buf,
unsigned long len, enum object_type type,
struct object_id *oid)
{
- hash_object_file_literally(algo, buf, len, type_name(type), oid);
+ char hdr[MAX_HEADER_LEN];
+ int hdrlen = sizeof(hdr);
+
+ write_object_file_prepare(algo, buf, len, type, oid, hdr, &hdrlen);
}
/* Finalize a file on disk, and close it. */
@@ -932,7 +876,7 @@ static int write_loose_object(const struct object_id *oid, char *hdr,
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
prepare_loose_object_bulk_checkin();
- loose_object_path(the_repository, &filename, oid);
+ odb_loose_path(the_repository->objects->odb, &filename, oid);
fd = start_loose_object_common(&tmp_file, filename.buf, flags,
&stream, compressed, sizeof(compressed),
@@ -1079,7 +1023,7 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
goto cleanup;
}
- loose_object_path(the_repository, &filename, oid);
+ odb_loose_path(the_repository->objects->odb, &filename, oid);
/* We finally know the object path, and create the missing dir. */
dirlen = directory_size(filename.buf);
@@ -1146,53 +1090,6 @@ int write_object_file_flags(const void *buf, unsigned long len,
return 0;
}
-int write_object_file_literally(const void *buf, unsigned long len,
- const char *type, struct object_id *oid,
- unsigned flags)
-{
- char *header;
- struct repository *repo = the_repository;
- const struct git_hash_algo *algo = repo->hash_algo;
- const struct git_hash_algo *compat = repo->compat_hash_algo;
- struct object_id compat_oid;
- int hdrlen, status = 0;
- int compat_type = -1;
-
- if (compat) {
- compat_type = type_from_string_gently(type, -1, 1);
- if (compat_type == OBJ_BLOB)
- hash_object_file(compat, buf, len, compat_type,
- &compat_oid);
- else if (compat_type != -1) {
- struct strbuf converted = STRBUF_INIT;
- convert_object_file(the_repository,
- &converted, algo, compat,
- buf, len, compat_type, 0);
- hash_object_file(compat, converted.buf, converted.len,
- compat_type, &compat_oid);
- strbuf_release(&converted);
- }
- }
-
- /* type string, SP, %lu of the length plus NUL must fit this */
- hdrlen = strlen(type) + MAX_HEADER_LEN;
- header = xmalloc(hdrlen);
- write_object_file_prepare_literally(the_hash_algo, buf, len, type,
- oid, header, &hdrlen);
-
- if (!(flags & WRITE_OBJECT_FILE_PERSIST))
- goto cleanup;
- if (freshen_packed_object(oid) || freshen_loose_object(oid))
- goto cleanup;
- status = write_loose_object(oid, header, hdrlen, buf, len, 0, 0);
- if (compat_type != -1)
- return repo_add_loose_object_map(repo, oid, &compat_oid);
-
-cleanup:
- free(header);
- return status;
-}
-
int force_object_loose(const struct object_id *oid, time_t mtime)
{
struct repository *repo = the_repository;
@@ -1682,8 +1579,7 @@ int read_loose_object(const char *path,
goto out;
}
- if (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr),
- NULL) != ULHR_OK) {
+ if (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr)) != ULHR_OK) {
error(_("unable to unpack header of %s"), path);
goto out_inflate;
}
@@ -1693,6 +1589,12 @@ int read_loose_object(const char *path,
goto out_inflate;
}
+ if (*oi->typep < 0) {
+ error(_("unable to parse type from header '%s' of %s"),
+ hdr, path);
+ goto out_inflate;
+ }
+
if (*oi->typep == OBJ_BLOB &&
*size > repo_settings_get_big_file_threshold(the_repository)) {
if (check_stream_oid(&stream, hdr, *size, path, expected_oid) < 0)
@@ -1703,9 +1605,9 @@ int read_loose_object(const char *path,
error(_("unable to unpack contents of %s"), path);
goto out_inflate;
}
- hash_object_file_literally(the_repository->hash_algo,
- *contents, *size,
- oi->type_name->buf, real_oid);
+ hash_object_file(the_repository->hash_algo,
+ *contents, *size,
+ *oi->typep, real_oid);
if (!oideq(expected_oid, real_oid))
goto out_inflate;
}
diff --git a/object-file.h b/object-file.h
index c002fbe234..6f41142452 100644
--- a/object-file.h
+++ b/object-file.h
@@ -3,6 +3,7 @@
#include "git-zlib.h"
#include "object.h"
+#include "object-store.h"
struct index_state;
@@ -25,6 +26,20 @@ int index_path(struct index_state *istate, struct object_id *oid, const char *pa
struct object_directory;
+/*
+ * Populate and return the loose object cache array corresponding to the
+ * given object ID.
+ */
+struct oidtree *odb_loose_cache(struct object_directory *odb,
+ const struct object_id *oid);
+
+/* Empty the loose object cache for the specified object directory. */
+void odb_clear_loose_cache(struct object_directory *odb);
+
+/*
+ * Put in `buf` the name of the file in the local object database that
+ * would be used to store a loose object with the specified oid.
+ */
const char *odb_loose_path(struct object_directory *odb,
struct strbuf *buf,
const struct object_id *oid);
@@ -38,6 +53,68 @@ int has_loose_object_nonlocal(const struct object_id *);
int has_loose_object(const struct object_id *);
+void *map_loose_object(struct repository *r, const struct object_id *oid,
+ unsigned long *size);
+
+/*
+ * Iterate over the files in the loose-object parts of the object
+ * directory "path", triggering the following callbacks:
+ *
+ * - loose_object is called for each loose object we find.
+ *
+ * - loose_cruft is called for any files that do not appear to be
+ * loose objects. Note that we only look in the loose object
+ * directories "objects/[0-9a-f]{2}/", so we will not report
+ * "objects/foobar" as cruft.
+ *
+ * - loose_subdir is called for each top-level hashed subdirectory
+ * of the object directory (e.g., "$OBJDIR/f0"). It is called
+ * after the objects in the directory are processed.
+ *
+ * Any callback that is NULL will be ignored. Callbacks returning non-zero
+ * will end the iteration.
+ *
+ * In the "buf" variant, "path" is a strbuf which will also be used as a
+ * scratch buffer, but restored to its original contents before
+ * the function returns.
+ */
+typedef int each_loose_object_fn(const struct object_id *oid,
+ const char *path,
+ void *data);
+typedef int each_loose_cruft_fn(const char *basename,
+ const char *path,
+ void *data);
+typedef int each_loose_subdir_fn(unsigned int nr,
+ const char *path,
+ void *data);
+int for_each_file_in_obj_subdir(unsigned int subdir_nr,
+ struct strbuf *path,
+ each_loose_object_fn obj_cb,
+ each_loose_cruft_fn cruft_cb,
+ each_loose_subdir_fn subdir_cb,
+ void *data);
+int for_each_loose_file_in_objdir(const char *path,
+ each_loose_object_fn obj_cb,
+ each_loose_cruft_fn cruft_cb,
+ each_loose_subdir_fn subdir_cb,
+ void *data);
+int for_each_loose_file_in_objdir_buf(struct strbuf *path,
+ each_loose_object_fn obj_cb,
+ each_loose_cruft_fn cruft_cb,
+ each_loose_subdir_fn subdir_cb,
+ void *data);
+
+/*
+ * Iterate over all accessible loose objects without respect to
+ * reachability. By default, this includes both local and alternate objects.
+ * The order in which objects are visited is unspecified.
+ *
+ * Any flags specific to packs are ignored.
+ */
+int for_each_loose_object(each_loose_object_fn, void *,
+ enum for_each_object_flags flags);
+
+
/**
* format_object_header() is a thin wrapper around s xsnprintf() that
* writes the initial "<type> <obj-len>" part of the loose object
@@ -56,12 +133,7 @@ int format_object_header(char *str, size_t size, enum object_type type,
* - ULHR_BAD on error
* - ULHR_TOO_LONG if the header was too long
*
- * It will only parse up to MAX_HEADER_LEN bytes unless an optional
- * "hdrbuf" argument is non-NULL. This is intended for use with
- * OBJECT_INFO_ALLOW_UNKNOWN_TYPE to extract the bad type for (error)
- * reporting. The full header will be extracted to "hdrbuf" for use
- * with parse_loose_header(), ULHR_TOO_LONG will still be returned
- * from this function to indicate that the header was too long.
+ * It will only parse up to MAX_HEADER_LEN bytes.
*/
enum unpack_loose_header_result {
ULHR_OK,
@@ -72,8 +144,7 @@ enum unpack_loose_header_result unpack_loose_header(git_zstream *stream,
unsigned char *map,
unsigned long mapsize,
void *buffer,
- unsigned long bufsiz,
- struct strbuf *hdrbuf);
+ unsigned long bufsiz);
/**
* parse_loose_header() parses the starting "<type> <len>\0" of an
@@ -88,7 +159,7 @@ int parse_loose_header(const char *hdr, struct object_info *oi);
enum {
/*
- * By default, `write_object_file_literally()` does not actually write
+ * By default, `write_object_file()` does not actually write
* anything into the object store, but only computes the object ID.
* This flag changes that so that the object will be written as a loose
* object and persisted.
@@ -103,7 +174,7 @@ enum {
int write_object_file_flags(const void *buf, unsigned long len,
enum object_type type, struct object_id *oid,
- struct object_id *comapt_oid_in, unsigned flags);
+ struct object_id *compat_oid_in, unsigned flags);
static inline int write_object_file(const void *buf, unsigned long len,
enum object_type type, struct object_id *oid)
{
@@ -116,9 +187,6 @@ struct input_stream {
int is_finished;
};
-int write_object_file_literally(const void *buf, unsigned long len,
- const char *type, struct object_id *oid,
- unsigned flags);
int stream_loose_object(struct input_stream *in_stream, size_t len,
struct object_id *oid);
@@ -154,6 +222,10 @@ int finalize_object_file(const char *tmpfile, const char *filename);
int finalize_object_file_flags(const char *tmpfile, const char *filename,
enum finalize_object_file_flags flags);
+void hash_object_file(const struct git_hash_algo *algo, const void *buf,
+ unsigned long len, enum object_type type,
+ struct object_id *oid);
+
/* Helper to check and "touch" a file */
int check_and_freshen_file(const char *fn, int freshen);
diff --git a/object-name.c b/object-name.c
index 2c751a5352..9288b2dd24 100644
--- a/object-name.c
+++ b/object-name.c
@@ -19,7 +19,7 @@
#include "oidtree.h"
#include "packfile.h"
#include "pretty.h"
-#include "object-store.h"
+#include "object-file.h"
#include "read-cache-ll.h"
#include "repo-settings.h"
#include "repository.h"
diff --git a/object-store.c b/object-store.c
index 6ab50d25d3..58cde0313a 100644
--- a/object-store.c
+++ b/object-store.c
@@ -83,25 +83,6 @@ int odb_mkstemp(struct strbuf *temp_filename, const char *pattern)
return xmkstemp_mode(temp_filename->buf, mode);
}
-int odb_pack_keep(const char *name)
-{
- int fd;
-
- fd = open(name, O_RDWR|O_CREAT|O_EXCL, 0600);
- if (0 <= fd)
- return fd;
-
- /* slow path */
- safe_create_leading_directories_const(the_repository, name);
- return open(name, O_RDWR|O_CREAT|O_EXCL, 0600);
-}
-
-const char *loose_object_path(struct repository *r, struct strbuf *buf,
- const struct object_id *oid)
-{
- return odb_loose_path(r->objects->odb, buf, oid);
-}
-
/*
* Return non-zero iff the path is usable as an alternate object database.
*/
@@ -665,8 +646,6 @@ static int do_oid_object_info_extended(struct repository *r,
*(oi->disk_sizep) = 0;
if (oi->delta_base_oid)
oidclr(oi->delta_base_oid, the_repository->hash_algo);
- if (oi->type_name)
- strbuf_addstr(oi->type_name, type_name(co->type));
if (oi->contentp)
*oi->contentp = xmemdupz(co->buf, co->size);
oi->whence = OI_CACHED;
@@ -746,7 +725,7 @@ static int oid_object_info_convert(struct repository *r,
{
const struct git_hash_algo *input_algo = &hash_algos[input_oid->algo];
int do_die = flags & OBJECT_INFO_DIE_IF_CORRUPT;
- struct strbuf type_name = STRBUF_INIT;
+ enum object_type type;
struct object_id oid, delta_base_oid;
struct object_info new_oi, *oi;
unsigned long size;
@@ -772,7 +751,7 @@ static int oid_object_info_convert(struct repository *r,
if (input_oi->sizep || input_oi->contentp) {
new_oi.contentp = &content;
new_oi.sizep = &size;
- new_oi.type_name = &type_name;
+ new_oi.typep = &type;
}
oi = &new_oi;
}
@@ -785,12 +764,7 @@ static int oid_object_info_convert(struct repository *r,
if (new_oi.contentp) {
struct strbuf outbuf = STRBUF_INIT;
- enum object_type type;
- type = type_from_string_gently(type_name.buf, type_name.len,
- !do_die);
- if (type == -1)
- return -1;
if (type != OBJ_BLOB) {
ret = convert_object_file(the_repository, &outbuf,
the_hash_algo, input_algo,
@@ -807,10 +781,8 @@ static int oid_object_info_convert(struct repository *r,
*input_oi->contentp = content;
else
free(content);
- if (input_oi->type_name)
- *input_oi->type_name = type_name;
- else
- strbuf_release(&type_name);
+ if (input_oi->typep)
+ *input_oi->typep = type;
}
if (new_oi.delta_base_oid == &delta_base_oid) {
if (repo_oid_to_algop(r, &delta_base_oid, input_algo,
@@ -866,7 +838,7 @@ int pretend_object_file(struct repository *repo,
char *co_buf;
hash_object_file(repo->hash_algo, buf, len, type, oid);
- if (repo_has_object_file_with_flags(repo, oid, OBJECT_INFO_QUICK | OBJECT_INFO_SKIP_FETCH_OBJECT) ||
+ if (has_object(repo, oid, 0) ||
find_cached_object(repo->objects, oid))
return 0;
@@ -956,27 +928,16 @@ void *read_object_with_reference(struct repository *r,
int has_object(struct repository *r, const struct object_id *oid,
unsigned flags)
{
- int quick = !(flags & HAS_OBJECT_RECHECK_PACKED);
- unsigned object_info_flags = OBJECT_INFO_SKIP_FETCH_OBJECT |
- (quick ? OBJECT_INFO_QUICK : 0);
-
- if (!startup_info->have_repository)
- return 0;
- return oid_object_info_extended(r, oid, NULL, object_info_flags) >= 0;
-}
+ unsigned object_info_flags = 0;
-int repo_has_object_file_with_flags(struct repository *r,
- const struct object_id *oid, int flags)
-{
if (!startup_info->have_repository)
return 0;
- return oid_object_info_extended(r, oid, NULL, flags) >= 0;
-}
+ if (!(flags & HAS_OBJECT_RECHECK_PACKED))
+ object_info_flags |= OBJECT_INFO_QUICK;
+ if (!(flags & HAS_OBJECT_FETCH_PROMISOR))
+ object_info_flags |= OBJECT_INFO_SKIP_FETCH_OBJECT;
-int repo_has_object_file(struct repository *r,
- const struct object_id *oid)
-{
- return repo_has_object_file_with_flags(r, oid, 0);
+ return oid_object_info_extended(r, oid, NULL, object_info_flags) >= 0;
}
void assert_oid_type(const struct object_id *oid, enum object_type expect)
@@ -1017,8 +978,7 @@ void raw_object_store_clear(struct raw_object_store *o)
{
FREE_AND_NULL(o->alternate_db);
- oidmap_free(o->replace_map, 1);
- FREE_AND_NULL(o->replace_map);
+ oidmap_clear(&o->replace_map, 1);
pthread_mutex_destroy(&o->replace_mutex);
free_commit_graph(o->commit_graph);
diff --git a/object-store.h b/object-store.h
index 46961dc954..c589008535 100644
--- a/object-store.h
+++ b/object-store.h
@@ -5,6 +5,7 @@
#include "object.h"
#include "list.h"
#include "oidset.h"
+#include "oidmap.h"
#include "thread-utils.h"
struct oidmap;
@@ -82,75 +83,8 @@ struct object_directory *set_temporary_primary_odb(const char *dir, int will_des
*/
void restore_primary_odb(struct object_directory *restore_odb, const char *old_path);
-/*
- * Populate and return the loose object cache array corresponding to the
- * given object ID.
- */
-struct oidtree *odb_loose_cache(struct object_directory *odb,
- const struct object_id *oid);
-
-/* Empty the loose object cache for the specified object directory. */
-void odb_clear_loose_cache(struct object_directory *odb);
-
-struct packed_git {
- struct hashmap_entry packmap_ent;
- struct packed_git *next;
- struct list_head mru;
- struct pack_window *windows;
- off_t pack_size;
- const void *index_data;
- size_t index_size;
- uint32_t num_objects;
- size_t crc_offset;
- struct oidset bad_objects;
- int index_version;
- time_t mtime;
- int pack_fd;
- int index; /* for builtin/pack-objects.c */
- unsigned pack_local:1,
- pack_keep:1,
- pack_keep_in_core:1,
- freshened:1,
- do_not_close:1,
- pack_promisor:1,
- multi_pack_index:1,
- is_cruft:1;
- unsigned char hash[GIT_MAX_RAWSZ];
- struct revindex_entry *revindex;
- const uint32_t *revindex_data;
- const uint32_t *revindex_map;
- size_t revindex_size;
- /*
- * mtimes_map points at the beginning of the memory mapped region of
- * this pack's corresponding .mtimes file, and mtimes_size is the size
- * of that .mtimes file
- */
- const uint32_t *mtimes_map;
- size_t mtimes_size;
-
- /* repo denotes the repository this packfile belongs to */
- struct repository *repo;
-
- /* something like ".git/objects/pack/xxxxx.pack" */
- char pack_name[FLEX_ARRAY]; /* more */
-};
-
+struct packed_git;
struct multi_pack_index;
-
-static inline int pack_map_entry_cmp(const void *cmp_data UNUSED,
- const struct hashmap_entry *entry,
- const struct hashmap_entry *entry2,
- const void *keydata)
-{
- const char *key = keydata;
- const struct packed_git *pg1, *pg2;
-
- pg1 = container_of(entry, const struct packed_git, packmap_ent);
- pg2 = container_of(entry2, const struct packed_git, packmap_ent);
-
- return strcmp(pg1->pack_name, key ? key : pg2->pack_name);
-}
-
struct cached_object_entry;
struct raw_object_store {
@@ -176,7 +110,7 @@ struct raw_object_store {
* Objects that should be substituted by other objects
* (see git-replace(1)).
*/
- struct oidmap *replace_map;
+ struct oidmap replace_map;
unsigned replace_map_initialized : 1;
pthread_mutex_t replace_mutex; /* protect object replace functions */
@@ -246,23 +180,6 @@ void raw_object_store_clear(struct raw_object_store *o);
*/
int odb_mkstemp(struct strbuf *temp_filename, const char *pattern);
-/*
- * Create a pack .keep file named "name" (which should generally be the output
- * of odb_pack_name). Returns a file descriptor opened for writing, or -1 on
- * error.
- */
-int odb_pack_keep(const char *name);
-
-/*
- * Put in `buf` the name of the file in the local object database that
- * would be used to store a loose object with the specified oid.
- */
-const char *loose_object_path(struct repository *r, struct strbuf *buf,
- const struct object_id *oid);
-
-void *map_loose_object(struct repository *r, const struct object_id *oid,
- unsigned long *size);
-
void *repo_read_object_file(struct repository *r,
const struct object_id *oid,
enum object_type *type,
@@ -271,10 +188,6 @@ void *repo_read_object_file(struct repository *r,
/* Read and unpack an object file into memory, write memory to an object file */
int oid_object_info(struct repository *r, const struct object_id *, unsigned long *);
-void hash_object_file(const struct git_hash_algo *algo, const void *buf,
- unsigned long len, enum object_type type,
- struct object_id *oid);
-
/*
* Add an object file to the in-memory object store, without writing it
* to disk.
@@ -293,7 +206,6 @@ struct object_info {
unsigned long *sizep;
off_t *disk_sizep;
struct object_id *delta_base_oid;
- struct strbuf *type_name;
void **contentp;
/* Response */
@@ -328,8 +240,6 @@ struct object_info {
/* Invoke lookup_replace_object() on the given hash */
#define OBJECT_INFO_LOOKUP_REPLACE 1
-/* Allow reading from a loose object file of unknown/bogus type */
-#define OBJECT_INFO_ALLOW_UNKNOWN_TYPE 2
/* Do not retry packed storage after checking packed and loose storage */
#define OBJECT_INFO_QUICK 8
/*
@@ -350,33 +260,20 @@ int oid_object_info_extended(struct repository *r,
const struct object_id *,
struct object_info *, unsigned flags);
-/* Retry packed storage after checking packed and loose storage */
-#define HAS_OBJECT_RECHECK_PACKED 1
+enum {
+ /* Retry packed storage after checking packed and loose storage */
+ HAS_OBJECT_RECHECK_PACKED = (1 << 0),
+ /* Allow fetching the object in case the repository has a promisor remote. */
+ HAS_OBJECT_FETCH_PROMISOR = (1 << 1),
+};
/*
* Returns 1 if the object exists. This function will not lazily fetch objects
- * in a partial clone.
+ * in a partial clone by default.
*/
int has_object(struct repository *r, const struct object_id *oid,
unsigned flags);
-/*
- * These macros and functions are deprecated. If checking existence for an
- * object that is likely to be missing and/or whose absence is relatively
- * inconsequential (or is consequential but the caller is prepared to handle
- * it), use has_object(), which has better defaults (no lazy fetch in a partial
- * clone and no rechecking of packed storage). In the unlikely event that a
- * caller needs to assert existence of an object that it fully expects to
- * exist, and wants to trigger a lazy fetch in a partial clone, use
- * oid_object_info_extended() with a NULL struct object_info.
- *
- * These functions can be removed once all callers have migrated to
- * has_object() and/or oid_object_info_extended().
- */
-int repo_has_object_file(struct repository *r, const struct object_id *oid);
-int repo_has_object_file_with_flags(struct repository *r,
- const struct object_id *oid, int flags);
-
void assert_oid_type(const struct object_id *oid, enum object_type expect);
/*
@@ -411,56 +308,7 @@ static inline void obj_read_unlock(void)
if(obj_read_use_lock)
pthread_mutex_unlock(&obj_read_mutex);
}
-
-/*
- * Iterate over the files in the loose-object parts of the object
- * directory "path", triggering the following callbacks:
- *
- * - loose_object is called for each loose object we find.
- *
- * - loose_cruft is called for any files that do not appear to be
- * loose objects. Note that we only look in the loose object
- * directories "objects/[0-9a-f]{2}/", so we will not report
- * "objects/foobar" as cruft.
- *
- * - loose_subdir is called for each top-level hashed subdirectory
- * of the object directory (e.g., "$OBJDIR/f0"). It is called
- * after the objects in the directory are processed.
- *
- * Any callback that is NULL will be ignored. Callbacks returning non-zero
- * will end the iteration.
- *
- * In the "buf" variant, "path" is a strbuf which will also be used as a
- * scratch buffer, but restored to its original contents before
- * the function returns.
- */
-typedef int each_loose_object_fn(const struct object_id *oid,
- const char *path,
- void *data);
-typedef int each_loose_cruft_fn(const char *basename,
- const char *path,
- void *data);
-typedef int each_loose_subdir_fn(unsigned int nr,
- const char *path,
- void *data);
-int for_each_file_in_obj_subdir(unsigned int subdir_nr,
- struct strbuf *path,
- each_loose_object_fn obj_cb,
- each_loose_cruft_fn cruft_cb,
- each_loose_subdir_fn subdir_cb,
- void *data);
-int for_each_loose_file_in_objdir(const char *path,
- each_loose_object_fn obj_cb,
- each_loose_cruft_fn cruft_cb,
- each_loose_subdir_fn subdir_cb,
- void *data);
-int for_each_loose_file_in_objdir_buf(struct strbuf *path,
- each_loose_object_fn obj_cb,
- each_loose_cruft_fn cruft_cb,
- each_loose_subdir_fn subdir_cb,
- void *data);
-
-/* Flags for for_each_*_object() below. */
+/* Flags for for_each_*_object(). */
enum for_each_object_flags {
/* Iterate only over local objects, not alternates. */
FOR_EACH_OBJECT_LOCAL_ONLY = (1<<0),
@@ -480,33 +328,6 @@ enum for_each_object_flags {
FOR_EACH_OBJECT_SKIP_ON_DISK_KEPT_PACKS = (1<<4),
};
-/*
- * Iterate over all accessible loose objects without respect to
- * reachability. By default, this includes both local and alternate objects.
- * The order in which objects are visited is unspecified.
- *
- * Any flags specific to packs are ignored.
- */
-int for_each_loose_object(each_loose_object_fn, void *,
- enum for_each_object_flags flags);
-
-/*
- * Iterate over all accessible packed objects without respect to reachability.
- * By default, this includes both local and alternate packs.
- *
- * Note that some objects may appear twice if they are found in multiple packs.
- * Each pack is visited in an unspecified order. By default, objects within a
- * pack are visited in pack-idx order (i.e., sorted by oid).
- */
-typedef int each_packed_object_fn(const struct object_id *oid,
- struct packed_git *pack,
- uint32_t pos,
- void *data);
-int for_each_object_in_pack(struct packed_git *p,
- each_packed_object_fn, void *data,
- enum for_each_object_flags flags);
-int for_each_packed_object(struct repository *repo, each_packed_object_fn cb,
- void *data, enum for_each_object_flags flags);
void *read_object_with_reference(struct repository *r,
const struct object_id *oid,
diff --git a/oidmap.c b/oidmap.c
index 8b1bc4dec9..508d6c7dec 100644
--- a/oidmap.c
+++ b/oidmap.c
@@ -22,7 +22,7 @@ void oidmap_init(struct oidmap *map, size_t initial_size)
hashmap_init(&map->map, oidmap_neq, NULL, initial_size);
}
-void oidmap_free(struct oidmap *map, int free_entries)
+void oidmap_clear(struct oidmap *map, int free_entries)
{
if (!map)
return;
diff --git a/oidmap.h b/oidmap.h
index fad412827a..67fb32290f 100644
--- a/oidmap.h
+++ b/oidmap.h
@@ -36,12 +36,13 @@ struct oidmap {
void oidmap_init(struct oidmap *map, size_t initial_size);
/*
- * Frees an oidmap structure and allocated memory.
+ * Clear an oidmap, freeing any allocated memory. The map is empty and
+ * can be reused without another explicit init.
*
* If `free_entries` is true, each oidmap_entry in the map is freed as well
* using stdlibs free().
*/
-void oidmap_free(struct oidmap *map, int free_entries);
+void oidmap_clear(struct oidmap *map, int free_entries);
/*
* Returns the oidmap entry for the specified oid, or NULL if not found.
@@ -66,6 +67,10 @@ void *oidmap_put(struct oidmap *map, void *entry);
*/
void *oidmap_remove(struct oidmap *map, const struct object_id *key);
+static inline unsigned int oidmap_get_size(struct oidmap *map)
+{
+ return hashmap_get_size(&map->map);
+}
struct oidmap_iter {
struct hashmap_iter h_iter;
diff --git a/pack-bitmap.c b/pack-bitmap.c
index b9f1d86604..ac6d62b980 100644
--- a/pack-bitmap.c
+++ b/pack-bitmap.c
@@ -388,10 +388,6 @@ static int load_bitmap_entries_v1(struct bitmap_index *index)
return error(_("corrupt ewah bitmap: commit index %u out of range"),
(unsigned)commit_idx_pos);
- bitmap = read_bitmap_1(index);
- if (!bitmap)
- return -1;
-
if (xor_offset > MAX_XOR_OFFSET || xor_offset > i)
return error(_("corrupted bitmap pack index"));
@@ -402,6 +398,10 @@ static int load_bitmap_entries_v1(struct bitmap_index *index)
return error(_("invalid XOR offset in bitmap pack index"));
}
+ bitmap = read_bitmap_1(index);
+ if (!bitmap)
+ return -1;
+
recent_bitmaps[i % MAX_XOR_OFFSET] = store_bitmap(
index, bitmap, &oid, xor_bitmap, flags);
}
diff --git a/pack-objects.h b/pack-objects.h
index d1c4ae7f9b..475a2d67ce 100644
--- a/pack-objects.h
+++ b/pack-objects.h
@@ -4,6 +4,7 @@
#include "object-store.h"
#include "thread-utils.h"
#include "pack.h"
+#include "packfile.h"
struct repository;
diff --git a/packfile.c b/packfile.c
index d91016f1c7..80e35f1032 100644
--- a/packfile.c
+++ b/packfile.c
@@ -1598,17 +1598,12 @@ int packed_object_info(struct repository *r, struct packed_git *p,
*oi->disk_sizep = pack_pos_to_offset(p, pos + 1) - obj_offset;
}
- if (oi->typep || oi->type_name) {
+ if (oi->typep) {
enum object_type ptot;
ptot = packed_to_object_type(r, p, obj_offset,
type, &w_curs, curpos);
if (oi->typep)
*oi->typep = ptot;
- if (oi->type_name) {
- const char *tn = type_name(ptot);
- if (tn)
- strbuf_addstr(oi->type_name, tn);
- }
if (ptot < 0) {
type = OBJ_BAD;
goto out;
diff --git a/packfile.h b/packfile.h
index 25097213d0..3a3c77cf05 100644
--- a/packfile.h
+++ b/packfile.h
@@ -1,13 +1,71 @@
#ifndef PACKFILE_H
#define PACKFILE_H
+#include "list.h"
#include "object.h"
+#include "object-store.h"
#include "oidset.h"
/* in object-store.h */
-struct packed_git;
struct object_info;
+struct packed_git {
+ struct hashmap_entry packmap_ent;
+ struct packed_git *next;
+ struct list_head mru;
+ struct pack_window *windows;
+ off_t pack_size;
+ const void *index_data;
+ size_t index_size;
+ uint32_t num_objects;
+ size_t crc_offset;
+ struct oidset bad_objects;
+ int index_version;
+ time_t mtime;
+ int pack_fd;
+ int index; /* for builtin/pack-objects.c */
+ unsigned pack_local:1,
+ pack_keep:1,
+ pack_keep_in_core:1,
+ freshened:1,
+ do_not_close:1,
+ pack_promisor:1,
+ multi_pack_index:1,
+ is_cruft:1;
+ unsigned char hash[GIT_MAX_RAWSZ];
+ struct revindex_entry *revindex;
+ const uint32_t *revindex_data;
+ const uint32_t *revindex_map;
+ size_t revindex_size;
+ /*
+ * mtimes_map points at the beginning of the memory mapped region of
+ * this pack's corresponding .mtimes file, and mtimes_size is the size
+ * of that .mtimes file
+ */
+ const uint32_t *mtimes_map;
+ size_t mtimes_size;
+
+ /* repo denotes the repository this packfile belongs to */
+ struct repository *repo;
+
+ /* something like ".git/objects/pack/xxxxx.pack" */
+ char pack_name[FLEX_ARRAY]; /* more */
+};
+
+static inline int pack_map_entry_cmp(const void *cmp_data UNUSED,
+ const struct hashmap_entry *entry,
+ const struct hashmap_entry *entry2,
+ const void *keydata)
+{
+ const char *key = keydata;
+ const struct packed_git *pg1, *pg2;
+
+ pg1 = container_of(entry, const struct packed_git, packmap_ent);
+ pg2 = container_of(entry2, const struct packed_git, packmap_ent);
+
+ return strcmp(pg1->pack_name, key ? key : pg2->pack_name);
+}
+
struct pack_window {
struct pack_window *next;
unsigned char *base;
@@ -60,6 +118,24 @@ void for_each_file_in_pack_dir(const char *objdir,
each_file_in_pack_dir_fn fn,
void *data);
+/*
+ * Iterate over all accessible packed objects without respect to reachability.
+ * By default, this includes both local and alternate packs.
+ *
+ * Note that some objects may appear twice if they are found in multiple packs.
+ * Each pack is visited in an unspecified order. By default, objects within a
+ * pack are visited in pack-idx order (i.e., sorted by oid).
+ */
+typedef int each_packed_object_fn(const struct object_id *oid,
+ struct packed_git *pack,
+ uint32_t pos,
+ void *data);
+int for_each_object_in_pack(struct packed_git *p,
+ each_packed_object_fn, void *data,
+ enum for_each_object_flags flags);
+int for_each_packed_object(struct repository *repo, each_packed_object_fn cb,
+ void *data, enum for_each_object_flags flags);
+
/* A hook to report invalid files in pack directory */
#define PACKDIR_FILE_PACK 1
#define PACKDIR_FILE_IDX 2
diff --git a/path.c b/path.c
index 4505bb78e8..3b598b2847 100644
--- a/path.c
+++ b/path.c
@@ -1011,6 +1011,20 @@ enum scld_error safe_create_leading_directories_const(struct repository *repo,
return result;
}
+int safe_create_file_with_leading_directories(struct repository *repo,
+ const char *path)
+{
+ int fd;
+
+ fd = open(path, O_RDWR|O_CREAT|O_EXCL, 0600);
+ if (0 <= fd)
+ return fd;
+
+ /* slow path */
+ safe_create_leading_directories_const(repo, path);
+ return open(path, O_RDWR|O_CREAT|O_EXCL, 0600);
+}
+
static int have_same_root(const char *path1, const char *path2)
{
int is_abs1, is_abs2;
diff --git a/path.h b/path.h
index fd1a194b06..e67348f253 100644
--- a/path.h
+++ b/path.h
@@ -266,6 +266,13 @@ enum scld_error safe_create_leading_directories_const(struct repository *repo,
const char *path);
enum scld_error safe_create_leading_directories_no_share(char *path);
+/*
+ * Create a file, potentially creating its leading directories in case they
+ * don't exist. Returns the return value of the open(3p) call.
+ */
+int safe_create_file_with_leading_directories(struct repository *repo,
+ const char *path);
+
# ifdef USE_THE_REPOSITORY_VARIABLE
# include "strbuf.h"
# include "repository.h"
diff --git a/perl/FromCPAN/Mail/meson.build b/perl/FromCPAN/Mail/meson.build
index b4ff2fc0b2..467507c5e6 100644
--- a/perl/FromCPAN/Mail/meson.build
+++ b/perl/FromCPAN/Mail/meson.build
@@ -3,6 +3,6 @@ test_dependencies += custom_target(
output: 'Address.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/FromCPAN/Mail',
+ install_dir: perllibdir / 'FromCPAN/Mail',
depends: [git_version_file],
)
diff --git a/perl/FromCPAN/meson.build b/perl/FromCPAN/meson.build
index 1f9ea6ce8e..720c60283d 100644
--- a/perl/FromCPAN/meson.build
+++ b/perl/FromCPAN/meson.build
@@ -3,7 +3,7 @@ test_dependencies += custom_target(
output: 'Error.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/FromCPAN',
+ install_dir: perllibdir / 'FromCPAN',
depends: [git_version_file],
)
diff --git a/perl/Git/LoadCPAN/Mail/meson.build b/perl/Git/LoadCPAN/Mail/meson.build
index 89cde56be8..05a5770560 100644
--- a/perl/Git/LoadCPAN/Mail/meson.build
+++ b/perl/Git/LoadCPAN/Mail/meson.build
@@ -3,6 +3,6 @@ test_dependencies += custom_target(
output: 'Address.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/Git/LoadCPAN/Mail',
+ install_dir: perllibdir / 'Git/LoadCPAN/Mail',
depends: [git_version_file],
)
diff --git a/perl/Git/LoadCPAN/meson.build b/perl/Git/LoadCPAN/meson.build
index 1ee915c650..b975d49726 100644
--- a/perl/Git/LoadCPAN/meson.build
+++ b/perl/Git/LoadCPAN/meson.build
@@ -3,7 +3,7 @@ test_dependencies += custom_target(
output: 'Error.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/Git/LoadCPAN',
+ install_dir: perllibdir / 'Git/LoadCPAN',
depends: [git_version_file],
)
diff --git a/perl/Git/SVN/Memoize/meson.build b/perl/Git/SVN/Memoize/meson.build
index 233ec670d7..4c589b30c3 100644
--- a/perl/Git/SVN/Memoize/meson.build
+++ b/perl/Git/SVN/Memoize/meson.build
@@ -3,6 +3,6 @@ test_dependencies += custom_target(
output: 'YAML.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/Git/SVN',
+ install_dir: perllibdir / 'Git/SVN',
depends: [git_version_file],
)
diff --git a/perl/Git/SVN/meson.build b/perl/Git/SVN/meson.build
index 44abaf42b7..8858985fe8 100644
--- a/perl/Git/SVN/meson.build
+++ b/perl/Git/SVN/meson.build
@@ -13,7 +13,7 @@ foreach source : [
output: source,
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/Git/SVN',
+ install_dir: perllibdir / 'Git/SVN',
depends: [git_version_file],
)
endforeach
diff --git a/perl/Git/meson.build b/perl/Git/meson.build
index b21fa5591e..a61b7b1f4a 100644
--- a/perl/Git/meson.build
+++ b/perl/Git/meson.build
@@ -10,7 +10,7 @@ foreach source : [
output: source,
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5/Git',
+ install_dir: perllibdir / 'Git',
depends: [git_version_file],
)
endforeach
diff --git a/perl/meson.build b/perl/meson.build
index 2d4ab1c4a9..3c66b007ea 100644
--- a/perl/meson.build
+++ b/perl/meson.build
@@ -3,7 +3,7 @@ test_dependencies += custom_target(
output: 'Git.pm',
command: generate_perl_command,
install: true,
- install_dir: get_option('datadir') / 'perl5',
+ install_dir: perllibdir,
depends: [git_version_file],
)
diff --git a/prune-packed.c b/prune-packed.c
index c1d95a519d..92fb4fbb0e 100644
--- a/prune-packed.c
+++ b/prune-packed.c
@@ -2,7 +2,7 @@
#include "git-compat-util.h"
#include "gettext.h"
-#include "object-store.h"
+#include "object-file.h"
#include "packfile.h"
#include "progress.h"
#include "prune-packed.h"
diff --git a/reachable.c b/reachable.c
index e5f56f4018..9dc748f0b9 100644
--- a/reachable.c
+++ b/reachable.c
@@ -14,7 +14,7 @@
#include "list-objects.h"
#include "packfile.h"
#include "worktree.h"
-#include "object-store.h"
+#include "object-file.h"
#include "pack-bitmap.h"
#include "pack-mtimes.h"
#include "config.h"
diff --git a/reflog.c b/reflog.c
index 12f7a02e34..15d81ebea9 100644
--- a/reflog.c
+++ b/reflog.c
@@ -152,7 +152,8 @@ static int tree_is_complete(const struct object_id *oid)
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
complete = 1;
while (tree_entry(&desc, &entry)) {
- if (!repo_has_object_file(the_repository, &entry.oid) ||
+ if (!has_object(the_repository, &entry.oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) ||
(S_ISDIR(entry.mode) && !tree_is_complete(&entry.oid))) {
tree->object.flags |= INCOMPLETE;
complete = 0;
diff --git a/refs.c b/refs.c
index 6559db3789..dce5c49ca2 100644
--- a/refs.c
+++ b/refs.c
@@ -376,7 +376,7 @@ int ref_resolves_to_object(const char *refname,
{
if (flags & REF_ISBROKEN)
return 0;
- if (!repo_has_object_file(repo, oid)) {
+ if (!has_object(repo, oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
error(_("%s does not point to a valid object!"), refname);
return 0;
}
diff --git a/reftable/block.c b/reftable/block.c
index 471faa1642..920b3f4486 100644
--- a/reftable/block.c
+++ b/reftable/block.c
@@ -227,7 +227,8 @@ static int read_block(struct reftable_block_source *source,
int reftable_block_init(struct reftable_block *block,
struct reftable_block_source *source,
uint32_t offset, uint32_t header_size,
- uint32_t table_block_size, uint32_t hash_size)
+ uint32_t table_block_size, uint32_t hash_size,
+ uint8_t want_type)
{
uint32_t guess_block_size = table_block_size ?
table_block_size : DEFAULT_BLOCK_SIZE;
@@ -247,6 +248,10 @@ int reftable_block_init(struct reftable_block *block,
err = REFTABLE_FORMAT_ERROR;
goto done;
}
+ if (want_type != REFTABLE_BLOCK_TYPE_ANY && block_type != want_type) {
+ err = 1;
+ goto done;
+ }
block_size = reftable_get_be24(block->block_data.data + header_size + 1);
if (block_size > guess_block_size) {
diff --git a/reftable/reftable-block.h b/reftable/reftable-block.h
index 04c3b518c8..0b05a8f7e3 100644
--- a/reftable/reftable-block.h
+++ b/reftable/reftable-block.h
@@ -56,7 +56,8 @@ struct reftable_block {
int reftable_block_init(struct reftable_block *b,
struct reftable_block_source *source,
uint32_t offset, uint32_t header_size,
- uint32_t table_block_size, uint32_t hash_size);
+ uint32_t table_block_size, uint32_t hash_size,
+ uint8_t want_type);
/* Release resources allocated by the block. */
void reftable_block_release(struct reftable_block *b);
diff --git a/reftable/table.c b/reftable/table.c
index ee83127615..56362df0ed 100644
--- a/reftable/table.c
+++ b/reftable/table.c
@@ -173,16 +173,7 @@ int table_init_block(struct reftable_table *t, struct reftable_block *block,
return 1;
err = reftable_block_init(block, &t->source, next_off, header_off,
- t->block_size, hash_size(t->hash_id));
- if (err < 0)
- goto done;
-
- if (want_typ != REFTABLE_BLOCK_TYPE_ANY && block->block_type != want_typ) {
- err = 1;
- goto done;
- }
-
-done:
+ t->block_size, hash_size(t->hash_id), want_typ);
if (err)
reftable_block_release(block);
return err;
diff --git a/reftable/writer.c b/reftable/writer.c
index cb16f71be4..3b4ebdd6dc 100644
--- a/reftable/writer.c
+++ b/reftable/writer.c
@@ -57,8 +57,10 @@ static int padded_write(struct reftable_writer *w, uint8_t *data, size_t len,
return -1;
n = w->write(w->write_arg, zeroed, w->pending_padding);
- if (n < 0)
+ if (n < 0) {
+ reftable_free(zeroed);
return n;
+ }
w->pending_padding = 0;
reftable_free(zeroed);
@@ -256,8 +258,10 @@ static int writer_index_hash(struct reftable_writer *w, struct reftable_buf *has
reftable_buf_reset(&key->hash);
err = reftable_buf_add(&key->hash, hash->buf, hash->len);
- if (err < 0)
+ if (err < 0) {
+ reftable_free(key);
return err;
+ }
tree_insert(&w->obj_index_tree, key,
&obj_index_tree_node_compare);
} else {
diff --git a/remote.c b/remote.c
index 9fa3614e7a..4099183cac 100644
--- a/remote.c
+++ b/remote.c
@@ -1702,7 +1702,7 @@ void set_ref_status_for_push(struct ref *remote_refs, int send_mirror,
if (!reject_reason && !ref->deletion && !is_null_oid(&ref->old_oid)) {
if (starts_with(ref->name, "refs/tags/"))
reject_reason = REF_STATUS_REJECT_ALREADY_EXISTS;
- else if (!repo_has_object_file_with_flags(the_repository, &ref->old_oid, OBJECT_INFO_SKIP_FETCH_OBJECT))
+ else if (!has_object(the_repository, &ref->old_oid, HAS_OBJECT_RECHECK_PACKED))
reject_reason = REF_STATUS_REJECT_FETCH_FIRST;
else if (!lookup_commit_reference_gently(the_repository, &ref->old_oid, 1) ||
!lookup_commit_reference_gently(the_repository, &ref->new_oid, 1))
diff --git a/replace-object.c b/replace-object.c
index 7b8a09b5cb..f8c5f68837 100644
--- a/replace-object.c
+++ b/replace-object.c
@@ -31,7 +31,7 @@ static int register_replace_ref(const char *refname,
oidcpy(&repl_obj->replacement, oid);
/* Register new object */
- if (oidmap_put(r->objects->replace_map, repl_obj))
+ if (oidmap_put(&r->objects->replace_map, repl_obj))
die(_("duplicate replace ref: %s"), refname);
return 0;
@@ -48,9 +48,7 @@ void prepare_replace_object(struct repository *r)
return;
}
- r->objects->replace_map =
- xmalloc(sizeof(*r->objects->replace_map));
- oidmap_init(r->objects->replace_map, 0);
+ oidmap_init(&r->objects->replace_map, 0);
refs_for_each_replace_ref(get_main_ref_store(r),
register_replace_ref, r);
@@ -80,7 +78,7 @@ const struct object_id *do_lookup_replace_object(struct repository *r,
/* Try to recursively replace the object */
while (depth-- > 0) {
struct replace_object *repl_obj =
- oidmap_get(r->objects->replace_map, cur);
+ oidmap_get(&r->objects->replace_map, cur);
if (!repl_obj)
return cur;
cur = &repl_obj->replacement;
diff --git a/replace-object.h b/replace-object.h
index ba478eb30c..3052e96a62 100644
--- a/replace-object.h
+++ b/replace-object.h
@@ -47,7 +47,7 @@ static inline const struct object_id *lookup_replace_object(struct repository *r
{
if (!replace_refs_enabled(r) ||
(r->objects->replace_map_initialized &&
- r->objects->replace_map->map.tablesize == 0))
+ oidmap_get_size(&r->objects->replace_map) == 0))
return oid;
return do_lookup_replace_object(r, oid);
}
diff --git a/scalar.c b/scalar.c
index d359f08bb8..355baf75e4 100644
--- a/scalar.c
+++ b/scalar.c
@@ -209,6 +209,12 @@ static int set_recommended_config(int reconfigure)
return 0;
}
+/**
+ * Enable or disable the maintenance mode for the current repository:
+ *
+ * * If 'enable' is nonzero, run 'git maintenance start'.
+ * * If 'enable' is zero, run 'git maintenance unregister --force'.
+ */
static int toggle_maintenance(int enable)
{
return run_git("maintenance",
@@ -259,7 +265,15 @@ static int stop_fsmonitor_daemon(void)
return 0;
}
-static int register_dir(void)
+/**
+ * Register the current directory as a Scalar enlistment, and set the
+ * recommended configuration.
+ *
+ * * If 'maintenance' is non-zero, then enable background maintenance.
+ * * If 'maintenance' is zero, then leave background maintenance as it is
+ * currently configured.
+ */
+static int register_dir(int maintenance)
{
if (add_or_remove_enlistment(1))
return error(_("could not add enlistment"));
@@ -267,8 +281,9 @@ static int register_dir(void)
if (set_recommended_config(0))
return error(_("could not set recommended config"));
- if (toggle_maintenance(1))
- warning(_("could not turn on maintenance"));
+ if (maintenance &&
+ toggle_maintenance(maintenance))
+ warning(_("could not toggle maintenance"));
if (have_fsmonitor_support() && start_fsmonitor_daemon()) {
return error(_("could not start the FSMonitor daemon"));
@@ -411,7 +426,7 @@ static int cmd_clone(int argc, const char **argv)
const char *branch = NULL;
char *branch_to_free = NULL;
int full_clone = 0, single_branch = 0, show_progress = isatty(2);
- int src = 1, tags = 1;
+ int src = 1, tags = 1, maintenance = 1;
struct option clone_options[] = {
OPT_STRING('b', "branch", &branch, N_("<branch>"),
N_("branch to checkout after clone")),
@@ -424,11 +439,13 @@ static int cmd_clone(int argc, const char **argv)
N_("create repository within 'src' directory")),
OPT_BOOL(0, "tags", &tags,
N_("specify if tags should be fetched during clone")),
+ OPT_BOOL(0, "maintenance", &maintenance,
+ N_("specify if background maintenance should be enabled")),
OPT_END(),
};
const char * const clone_usage[] = {
N_("scalar clone [--single-branch] [--branch <main-branch>] [--full-clone]\n"
- "\t[--[no-]src] [--[no-]tags] <url> [<enlistment>]"),
+ "\t[--[no-]src] [--[no-]tags] [--[no-]maintenance] <url> [<enlistment>]"),
NULL
};
const char *url;
@@ -550,7 +567,8 @@ static int cmd_clone(int argc, const char **argv)
if (res)
goto cleanup;
- res = register_dir();
+ /* If --no-maintenance, then skip maintenance command entirely. */
+ res = register_dir(maintenance);
cleanup:
free(branch_to_free);
@@ -597,11 +615,14 @@ static int cmd_list(int argc, const char **argv UNUSED)
static int cmd_register(int argc, const char **argv)
{
+ int maintenance = 1;
struct option options[] = {
+ OPT_BOOL(0, "maintenance", &maintenance,
+ N_("specify if background maintenance should be enabled")),
OPT_END(),
};
const char * const usage[] = {
- N_("scalar register [<enlistment>]"),
+ N_("scalar register [--[no-]maintenance] [<enlistment>]"),
NULL
};
@@ -610,7 +631,8 @@ static int cmd_register(int argc, const char **argv)
setup_enlistment_directory(argc, argv, usage, options, NULL);
- return register_dir();
+ /* If --no-maintenance, then leave maintenance as-is. */
+ return register_dir(maintenance);
}
static int get_scalar_repos(const char *key, const char *value,
@@ -646,13 +668,19 @@ static int remove_deleted_enlistment(struct strbuf *path)
static int cmd_reconfigure(int argc, const char **argv)
{
int all = 0;
+ const char *maintenance_str = NULL;
+ int maintenance = 1; /* Enable maintenance by default. */
+
struct option options[] = {
OPT_BOOL('a', "all", &all,
N_("reconfigure all registered enlistments")),
+ OPT_STRING(0, "maintenance", &maintenance_str,
+ N_("(enable|disable|keep)"),
+ N_("signal how to adjust background maintenance")),
OPT_END(),
};
const char * const usage[] = {
- N_("scalar reconfigure [--all | <enlistment>]"),
+ N_("scalar reconfigure [--maintenance=(enable|disable|keep)] [--all | <enlistment>]"),
NULL
};
struct string_list scalar_repos = STRING_LIST_INIT_DUP;
@@ -672,6 +700,18 @@ static int cmd_reconfigure(int argc, const char **argv)
usage_msg_opt(_("--all or <enlistment>, but not both"),
usage, options);
+ if (maintenance_str) {
+ if (!strcmp(maintenance_str, "enable"))
+ maintenance = 1;
+ else if (!strcmp(maintenance_str, "disable"))
+ maintenance = 0;
+ else if (!strcmp(maintenance_str, "keep"))
+ maintenance = -1;
+ else
+ die(_("unknown mode for --maintenance option: %s"),
+ maintenance_str);
+ }
+
git_config(get_scalar_repos, &scalar_repos);
for (size_t i = 0; i < scalar_repos.nr; i++) {
@@ -736,7 +776,8 @@ static int cmd_reconfigure(int argc, const char **argv)
the_repository = old_repo;
repo_clear(&r);
- if (toggle_maintenance(1) >= 0)
+ if (maintenance >= 0 &&
+ toggle_maintenance(maintenance) >= 0)
succeeded = 1;
loop_end:
@@ -803,13 +844,13 @@ static int cmd_run(int argc, const char **argv)
strbuf_release(&buf);
if (i == 0)
- return register_dir();
+ return register_dir(1);
if (i > 0)
return run_git("maintenance", "run",
"--task", tasks[i].task, NULL);
- if (register_dir())
+ if (register_dir(1))
return -1;
for (i = 1; tasks[i].arg; i++)
if (run_git("maintenance", "run",
diff --git a/send-pack.c b/send-pack.c
index 5005689cb5..86592ce526 100644
--- a/send-pack.c
+++ b/send-pack.c
@@ -45,10 +45,7 @@ int option_parse_push_signed(const struct option *opt,
static void feed_object(struct repository *r,
const struct object_id *oid, FILE *fh, int negative)
{
- if (negative &&
- !repo_has_object_file_with_flags(r, oid,
- OBJECT_INFO_SKIP_FETCH_OBJECT |
- OBJECT_INFO_QUICK))
+ if (negative && !has_object(r, oid, 0))
return;
if (negative)
diff --git a/sequencer.c b/sequencer.c
index b5c4043757..eacede6bf2 100644
--- a/sequencer.c
+++ b/sequencer.c
@@ -225,11 +225,6 @@ struct replay_ctx {
*/
struct strbuf current_fixups;
/*
- * Stores the reflog message that will be used when creating a
- * commit. Points to a static buffer and should not be free()'d.
- */
- const char *reflog_message;
- /*
* The number of completed fixup and squash commands in the
* current chain.
*/
@@ -1124,10 +1119,10 @@ static int run_command_silent_on_success(struct child_process *cmd)
* author metadata.
*/
static int run_git_commit(const char *defmsg,
+ const char *reflog_action,
struct replay_opts *opts,
unsigned int flags)
{
- struct replay_ctx *ctx = opts->ctx;
struct child_process cmd = CHILD_PROCESS_INIT;
if ((flags & CLEANUP_MSG) && (flags & VERBATIM_MSG))
@@ -1145,7 +1140,7 @@ static int run_git_commit(const char *defmsg,
gpg_opt, gpg_opt);
}
- strvec_pushf(&cmd.env, GIT_REFLOG_ACTION "=%s", ctx->reflog_message);
+ strvec_pushf(&cmd.env, GIT_REFLOG_ACTION "=%s", reflog_action);
if (opts->committer_date_is_author_date)
strvec_pushf(&cmd.env, "GIT_COMMITTER_DATE=%s",
@@ -1529,10 +1524,10 @@ static int parse_head(struct repository *r, struct commit **head)
*/
static int try_to_commit(struct repository *r,
struct strbuf *msg, const char *author,
+ const char *reflog_action,
struct replay_opts *opts, unsigned int flags,
struct object_id *oid)
{
- struct replay_ctx *ctx = opts->ctx;
struct object_id tree;
struct commit *current_head = NULL;
struct commit_list *parents = NULL;
@@ -1694,7 +1689,7 @@ static int try_to_commit(struct repository *r,
goto out;
}
- if (update_head_with_reflog(current_head, oid, ctx->reflog_message,
+ if (update_head_with_reflog(current_head, oid, reflog_action,
msg, &err)) {
res = error("%s", err.buf);
goto out;
@@ -1725,6 +1720,7 @@ static int write_rebase_head(struct object_id *oid)
static int do_commit(struct repository *r,
const char *msg_file, const char *author,
+ const char *reflog_action,
struct replay_opts *opts, unsigned int flags,
struct object_id *oid)
{
@@ -1740,7 +1736,7 @@ static int do_commit(struct repository *r,
msg_file);
res = try_to_commit(r, msg_file ? &sb : NULL,
- author, opts, flags, &oid);
+ author, reflog_action, opts, flags, &oid);
strbuf_release(&sb);
if (!res) {
refs_delete_ref(get_main_ref_store(r), "",
@@ -1756,7 +1752,7 @@ static int do_commit(struct repository *r,
if (is_rebase_i(opts) && oid)
if (write_rebase_head(oid))
return -1;
- return run_git_commit(msg_file, opts, flags);
+ return run_git_commit(msg_file, reflog_action, opts, flags);
}
return res;
@@ -2226,6 +2222,39 @@ static void refer_to_commit(struct replay_opts *opts,
}
}
+static const char *sequencer_reflog_action(struct replay_opts *opts)
+{
+ if (!opts->reflog_action) {
+ opts->reflog_action = getenv(GIT_REFLOG_ACTION);
+ opts->reflog_action =
+ xstrdup(opts->reflog_action ? opts->reflog_action
+ : action_name(opts));
+ }
+
+ return opts->reflog_action;
+}
+
+__attribute__((format (printf, 3, 4)))
+static const char *reflog_message(struct replay_opts *opts,
+ const char *sub_action, const char *fmt, ...)
+{
+ va_list ap;
+ static struct strbuf buf = STRBUF_INIT;
+
+ va_start(ap, fmt);
+ strbuf_reset(&buf);
+ strbuf_addstr(&buf, sequencer_reflog_action(opts));
+ if (sub_action)
+ strbuf_addf(&buf, " (%s)", sub_action);
+ if (fmt) {
+ strbuf_addstr(&buf, ": ");
+ strbuf_vaddf(&buf, fmt, ap);
+ }
+ va_end(ap);
+
+ return buf.buf;
+}
+
static int do_pick_commit(struct repository *r,
struct todo_item *item,
struct replay_opts *opts,
@@ -2236,13 +2265,19 @@ static int do_pick_commit(struct repository *r,
const char *msg_file = should_edit(opts) ? NULL : git_path_merge_msg(r);
struct object_id head;
struct commit *base, *next, *parent;
- const char *base_label, *next_label;
+ const char *base_label, *next_label, *reflog_action;
char *author = NULL;
struct commit_message msg = { NULL, NULL, NULL, NULL };
int res, unborn = 0, reword = 0, allow, drop_commit;
enum todo_command command = item->command;
struct commit *commit = item->commit;
+ if (is_rebase_i(opts))
+ reflog_action = reflog_message(
+ opts, command_to_string(item->command), NULL);
+ else
+ reflog_action = sequencer_reflog_action(opts);
+
if (opts->no_commit) {
/*
* We do not intend to commit immediately. We just want to
@@ -2494,7 +2529,8 @@ static int do_pick_commit(struct repository *r,
} /* else allow == 0 and there's nothing special to do */
if (!opts->no_commit && !drop_commit) {
if (author || command == TODO_REVERT || (flags & AMEND_MSG))
- res = do_commit(r, msg_file, author, opts, flags,
+ res = do_commit(r, msg_file, author, reflog_action,
+ opts, flags,
commit? &commit->object.oid : NULL);
else
res = error(_("unable to parse commit author"));
@@ -2509,7 +2545,7 @@ fast_forward_edit:
* got here.
*/
flags = EDIT_MSG | VERIFY_MSG | AMEND_MSG | ALLOW_EMPTY;
- res = run_git_commit(NULL, opts, flags);
+ res = run_git_commit(NULL, reflog_action, opts, flags);
*check_todo = 1;
}
}
@@ -3919,39 +3955,6 @@ static int do_label(struct repository *r, const char *name, int len)
return ret;
}
-static const char *sequencer_reflog_action(struct replay_opts *opts)
-{
- if (!opts->reflog_action) {
- opts->reflog_action = getenv(GIT_REFLOG_ACTION);
- opts->reflog_action =
- xstrdup(opts->reflog_action ? opts->reflog_action
- : action_name(opts));
- }
-
- return opts->reflog_action;
-}
-
-__attribute__((format (printf, 3, 4)))
-static const char *reflog_message(struct replay_opts *opts,
- const char *sub_action, const char *fmt, ...)
-{
- va_list ap;
- static struct strbuf buf = STRBUF_INIT;
-
- va_start(ap, fmt);
- strbuf_reset(&buf);
- strbuf_addstr(&buf, sequencer_reflog_action(opts));
- if (sub_action)
- strbuf_addf(&buf, " (%s)", sub_action);
- if (fmt) {
- strbuf_addstr(&buf, ": ");
- strbuf_vaddf(&buf, fmt, ap);
- }
- va_end(ap);
-
- return buf.buf;
-}
-
static struct commit *lookup_label(struct repository *r, const char *label,
int len, struct strbuf *buf)
{
@@ -4089,6 +4092,7 @@ static int do_merge(struct repository *r,
int merge_arg_len, oneline_offset, can_fast_forward, ret, k;
static struct lock_file lock;
const char *p;
+ const char *reflog_action = reflog_message(opts, "merge", NULL);
if (repo_hold_locked_index(r, &lock, LOCK_REPORT_ON_ERROR) < 0) {
ret = -1;
@@ -4360,14 +4364,15 @@ static int do_merge(struct repository *r,
* value (a negative one would indicate that the `merge`
* command needs to be rescheduled).
*/
- ret = !!run_git_commit(git_path_merge_msg(r), opts,
- run_commit_flags);
+ ret = !!run_git_commit(git_path_merge_msg(r), reflog_action,
+ opts, run_commit_flags);
if (!ret && flags & TODO_EDIT_MERGE_MSG) {
fast_forward_edit:
*check_todo = 1;
run_commit_flags |= AMEND_MSG | EDIT_MSG | VERIFY_MSG;
- ret = !!run_git_commit(NULL, opts, run_commit_flags);
+ ret = !!run_git_commit(NULL, reflog_action, opts,
+ run_commit_flags);
}
@@ -4882,13 +4887,9 @@ static int pick_one_commit(struct repository *r,
struct replay_opts *opts,
int *check_todo, int* reschedule)
{
- struct replay_ctx *ctx = opts->ctx;
int res;
struct todo_item *item = todo_list->items + todo_list->current;
const char *arg = todo_item_get_arg(todo_list, item);
- if (is_rebase_i(opts))
- ctx->reflog_message = reflog_message(
- opts, command_to_string(item->command), NULL);
res = do_pick_commit(r, item, opts, is_final_fixup(todo_list),
check_todo);
@@ -4947,7 +4948,6 @@ static int pick_commits(struct repository *r,
struct replay_ctx *ctx = opts->ctx;
int res = 0, reschedule = 0;
- ctx->reflog_message = sequencer_reflog_action(opts);
if (opts->allow_ff)
ASSERT(!(opts->signoff || opts->no_commit ||
opts->record_origin || should_edit(opts) ||
@@ -5208,6 +5208,7 @@ static int commit_staged_changes(struct repository *r,
unsigned int flags = ALLOW_EMPTY | EDIT_MSG;
unsigned int final_fixup = 0, is_clean;
struct strbuf rev = STRBUF_INIT;
+ const char *reflog_action = reflog_message(opts, "continue", NULL);
int ret;
if (has_unstaged_changes(r, 1)) {
@@ -5370,7 +5371,7 @@ static int commit_staged_changes(struct repository *r,
}
if (run_git_commit(final_fixup ? NULL : rebase_path_message(),
- opts, flags)) {
+ reflog_action, opts, flags)) {
ret = error(_("could not commit staged changes."));
goto out;
}
@@ -5402,7 +5403,6 @@ out:
int sequencer_continue(struct repository *r, struct replay_opts *opts)
{
- struct replay_ctx *ctx = opts->ctx;
struct todo_list todo_list = TODO_LIST_INIT;
int res;
@@ -5423,7 +5423,6 @@ int sequencer_continue(struct repository *r, struct replay_opts *opts)
unlink(rebase_path_dropped());
}
- ctx->reflog_message = reflog_message(opts, "continue", NULL);
if (commit_staged_changes(r, opts, &todo_list)) {
res = -1;
goto release_todo_list;
@@ -5475,7 +5474,6 @@ static int single_pick(struct repository *r,
TODO_PICK : TODO_REVERT;
item.commit = cmit;
- opts->ctx->reflog_message = sequencer_reflog_action(opts);
return do_pick_commit(r, &item, opts, 0, &check_todo);
}
@@ -6053,8 +6051,8 @@ static int make_script_with_merges(struct pretty_print_context *pp,
oidset_clear(&interesting);
oidset_clear(&child_seen);
oidset_clear(&shown);
- oidmap_free(&commit2todo, 1);
- oidmap_free(&state.commit2label, 1);
+ oidmap_clear(&commit2todo, 1);
+ oidmap_clear(&state.commit2label, 1);
hashmap_clear_and_free(&state.labels, struct labels_entry, entry);
strbuf_release(&state.buf);
@@ -6596,6 +6594,7 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
char **subjects;
struct commit_todo_item commit_todo;
struct todo_item *items = NULL;
+ int ret = 0;
init_commit_todo_item(&commit_todo);
/*
@@ -6626,8 +6625,8 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
}
if (is_fixup(item->command)) {
- clear_commit_todo_item(&commit_todo);
- return error(_("the script was already rearranged."));
+ ret = error(_("the script was already rearranged."));
+ goto cleanup;
}
repo_parse_commit(the_repository, item->commit);
@@ -6729,6 +6728,7 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
todo_list->items = items;
}
+cleanup:
free(next);
free(tail);
for (i = 0; i < todo_list->nr; i++)
@@ -6738,7 +6738,7 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
clear_commit_todo_item(&commit_todo);
- return 0;
+ return ret;
}
int sequencer_determine_whence(struct repository *r, enum commit_whence *whence)
diff --git a/shallow.c b/shallow.c
index 2f82ebd6e3..faeeeb45f9 100644
--- a/shallow.c
+++ b/shallow.c
@@ -310,7 +310,8 @@ static int write_one_shallow(const struct commit_graft *graft, void *cb_data)
if (graft->nr_parent != -1)
return 0;
if (data->flags & QUICK) {
- if (!repo_has_object_file(the_repository, &graft->oid))
+ if (!has_object(the_repository, &graft->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0;
} else if (data->flags & SEEN_ONLY) {
struct commit *c = lookup_commit(the_repository, &graft->oid);
@@ -476,7 +477,8 @@ void prepare_shallow_info(struct shallow_info *info, struct oid_array *sa)
ALLOC_ARRAY(info->ours, sa->nr);
ALLOC_ARRAY(info->theirs, sa->nr);
for (size_t i = 0; i < sa->nr; i++) {
- if (repo_has_object_file(the_repository, sa->oid + i)) {
+ if (has_object(the_repository, sa->oid + i,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
struct commit_graft *graft;
graft = lookup_commit_graft(the_repository,
&sa->oid[i]);
@@ -513,7 +515,8 @@ void remove_nonexistent_theirs_shallow(struct shallow_info *info)
for (i = dst = 0; i < info->nr_theirs; i++) {
if (i != dst)
info->theirs[dst] = info->theirs[i];
- if (repo_has_object_file(the_repository, oid + info->theirs[i]))
+ if (has_object(the_repository, oid + info->theirs[i],
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
dst++;
}
info->nr_theirs = dst;
diff --git a/streaming.c b/streaming.c
index 127d6b5d6a..6d6512e2e0 100644
--- a/streaming.c
+++ b/streaming.c
@@ -238,7 +238,7 @@ static int open_istream_loose(struct git_istream *st, struct repository *r,
return -1;
switch (unpack_loose_header(&st->z, st->u.loose.mapped,
st->u.loose.mapsize, st->u.loose.hdr,
- sizeof(st->u.loose.hdr), NULL)) {
+ sizeof(st->u.loose.hdr))) {
case ULHR_OK:
break;
case ULHR_BAD:
diff --git a/t/helper/meson.build b/t/helper/meson.build
index d2cabaa2bc..675e64c010 100644
--- a/t/helper/meson.build
+++ b/t/helper/meson.build
@@ -36,6 +36,7 @@ test_tool_sources = [
'test-mktemp.c',
'test-name-hash.c',
'test-online-cpus.c',
+ 'test-pack-deltas.c',
'test-pack-mtimes.c',
'test-parse-options.c',
'test-parse-pathspec-file.c',
@@ -76,6 +77,7 @@ test_tool_sources = [
'test-windows-named-pipe.c',
'test-write-cache.c',
'test-xml-encode.c',
+ 'test-zlib.c',
]
test_tool = executable('test-tool',
diff --git a/t/helper/test-pack-deltas.c b/t/helper/test-pack-deltas.c
new file mode 100644
index 0000000000..4caa024b1e
--- /dev/null
+++ b/t/helper/test-pack-deltas.c
@@ -0,0 +1,148 @@
+#define USE_THE_REPOSITORY_VARIABLE
+
+#include "test-tool.h"
+#include "git-compat-util.h"
+#include "delta.h"
+#include "git-zlib.h"
+#include "hash.h"
+#include "hex.h"
+#include "pack.h"
+#include "pack-objects.h"
+#include "parse-options.h"
+#include "setup.h"
+#include "strbuf.h"
+#include "string-list.h"
+
+static const char *usage_str[] = {
+ "test-tool pack-deltas --num-objects <num-objects>",
+ NULL
+};
+
+static unsigned long do_compress(void **pptr, unsigned long size)
+{
+ git_zstream stream;
+ void *in, *out;
+ unsigned long maxsize;
+
+ git_deflate_init(&stream, 1);
+ maxsize = git_deflate_bound(&stream, size);
+
+ in = *pptr;
+ out = xmalloc(maxsize);
+ *pptr = out;
+
+ stream.next_in = in;
+ stream.avail_in = size;
+ stream.next_out = out;
+ stream.avail_out = maxsize;
+ while (git_deflate(&stream, Z_FINISH) == Z_OK)
+ ; /* nothing */
+ git_deflate_end(&stream);
+
+ free(in);
+ return stream.total_out;
+}
+
+static void write_ref_delta(struct hashfile *f,
+ struct object_id *oid,
+ struct object_id *base)
+{
+ unsigned char header[MAX_PACK_OBJECT_HEADER];
+ unsigned long size, base_size, delta_size, compressed_size, hdrlen;
+ enum object_type type;
+ void *base_buf, *delta_buf;
+ void *buf = repo_read_object_file(the_repository,
+ oid, &type,
+ &size);
+
+ if (!buf)
+ die("unable to read %s", oid_to_hex(oid));
+
+ base_buf = repo_read_object_file(the_repository,
+ base, &type,
+ &base_size);
+
+ if (!base_buf)
+ die("unable to read %s", oid_to_hex(base));
+
+ delta_buf = diff_delta(base_buf, base_size,
+ buf, size, &delta_size, 0);
+
+ compressed_size = do_compress(&delta_buf, delta_size);
+
+ hdrlen = encode_in_pack_object_header(header, sizeof(header),
+ OBJ_REF_DELTA, delta_size);
+ hashwrite(f, header, hdrlen);
+ hashwrite(f, base->hash, the_repository->hash_algo->rawsz);
+ hashwrite(f, delta_buf, compressed_size);
+
+ free(buf);
+ free(base_buf);
+ free(delta_buf);
+}
+
+int cmd__pack_deltas(int argc, const char **argv)
+{
+ int num_objects = -1;
+ struct hashfile *f;
+ struct strbuf line = STRBUF_INIT;
+ struct option options[] = {
+ OPT_INTEGER('n', "num-objects", &num_objects, N_("the number of objects to write")),
+ OPT_END()
+ };
+
+ argc = parse_options(argc, argv, NULL,
+ options, usage_str, 0);
+
+ if (argc || num_objects < 0)
+ usage_with_options(usage_str, options);
+
+ setup_git_directory();
+
+ f = hashfd(the_repository->hash_algo, 1, "<stdout>");
+ write_pack_header(f, num_objects);
+
+ /* Read each line from stdin into 'line' */
+ while (strbuf_getline_lf(&line, stdin) != EOF) {
+ const char *type_str, *content_oid_str, *base_oid_str = NULL;
+ struct object_id content_oid, base_oid;
+ struct string_list items = STRING_LIST_INIT_NODUP;
+ /*
+ * Tokenize into two or three parts:
+ * 1. REF_DELTA, OFS_DELTA, or FULL.
+ * 2. The object ID for the content object.
+ * 3. The object ID for the base object (optional).
+ */
+ if (string_list_split_in_place(&items, line.buf, " ", 3) < 0)
+ die("invalid input format: %s", line.buf);
+
+ if (items.nr < 2)
+ die("invalid input format: %s", line.buf);
+
+ type_str = items.items[0].string;
+ content_oid_str = items.items[1].string;
+
+ if (get_oid_hex(content_oid_str, &content_oid))
+ die("invalid object: %s", content_oid_str);
+ if (items.nr >= 3) {
+ base_oid_str = items.items[2].string;
+ if (get_oid_hex(base_oid_str, &base_oid))
+ die("invalid object: %s", base_oid_str);
+ }
+ string_list_clear(&items, 0);
+
+ if (!strcmp(type_str, "REF_DELTA"))
+ write_ref_delta(f, &content_oid, &base_oid);
+ else if (!strcmp(type_str, "OFS_DELTA"))
+ die("OFS_DELTA not implemented");
+ else if (!strcmp(type_str, "FULL"))
+ die("FULL not implemented");
+ else
+ die("unknown pack type: %s", type_str);
+ }
+
+ finalize_hashfile(f, NULL, FSYNC_COMPONENT_PACK,
+ CSUM_HASH_IN_STREAM | CSUM_FSYNC | CSUM_CLOSE);
+ strbuf_release(&line);
+ return 0;
+}
diff --git a/t/helper/test-tool.c b/t/helper/test-tool.c
index 50dc4dac4e..a7abc618b3 100644
--- a/t/helper/test-tool.c
+++ b/t/helper/test-tool.c
@@ -46,6 +46,7 @@ static struct test_cmd cmds[] = {
{ "mktemp", cmd__mktemp },
{ "name-hash", cmd__name_hash },
{ "online-cpus", cmd__online_cpus },
+ { "pack-deltas", cmd__pack_deltas },
{ "pack-mtimes", cmd__pack_mtimes },
{ "parse-options", cmd__parse_options },
{ "parse-options-flags", cmd__parse_options_flags },
@@ -90,6 +91,7 @@ static struct test_cmd cmds[] = {
{ "windows-named-pipe", cmd__windows_named_pipe },
#endif
{ "write-cache", cmd__write_cache },
+ { "zlib", cmd__zlib },
};
static NORETURN void die_usage(void)
diff --git a/t/helper/test-tool.h b/t/helper/test-tool.h
index 6d62a5b53d..7f150fa1eb 100644
--- a/t/helper/test-tool.h
+++ b/t/helper/test-tool.h
@@ -39,6 +39,7 @@ int cmd__mergesort(int argc, const char **argv);
int cmd__mktemp(int argc, const char **argv);
int cmd__name_hash(int argc, const char **argv);
int cmd__online_cpus(int argc, const char **argv);
+int cmd__pack_deltas(int argc, const char **argv);
int cmd__pack_mtimes(int argc, const char **argv);
int cmd__parse_options(int argc, const char **argv);
int cmd__parse_options_flags(int argc, const char **argv);
@@ -83,6 +84,7 @@ int cmd__wildmatch(int argc, const char **argv);
int cmd__windows_named_pipe(int argc, const char **argv);
#endif
int cmd__write_cache(int argc, const char **argv);
+int cmd__zlib(int argc, const char **argv);
int cmd_hash_impl(int ac, const char **av, int algo, int unsafe);
diff --git a/t/helper/test-zlib.c b/t/helper/test-zlib.c
new file mode 100644
index 0000000000..de7e9edee1
--- /dev/null
+++ b/t/helper/test-zlib.c
@@ -0,0 +1,62 @@
+#include "test-tool.h"
+#include "git-zlib.h"
+#include "strbuf.h"
+
+static const char *zlib_usage = "test-tool zlib [inflate|deflate]";
+
+static void do_zlib(struct git_zstream *stream,
+ int (*zlib_func)(git_zstream *, int),
+ int fd_in, int fd_out)
+{
+ struct strbuf buf_in = STRBUF_INIT;
+ int status = Z_OK;
+
+ if (strbuf_read(&buf_in, fd_in, 0) < 0)
+ die_errno("read error");
+
+ stream->next_in = (unsigned char *)buf_in.buf;
+ stream->avail_in = buf_in.len;
+
+ while (status == Z_OK ||
+ (status == Z_BUF_ERROR && !stream->avail_out)) {
+ unsigned char buf_out[4096];
+
+ stream->next_out = buf_out;
+ stream->avail_out = sizeof(buf_out);
+
+ status = zlib_func(stream, Z_FINISH);
+ if (write_in_full(fd_out, buf_out,
+ sizeof(buf_out) - stream->avail_out) < 0)
+ die_errno("write error");
+ }
+
+ if (status != Z_STREAM_END)
+ die("zlib error %d", status);
+
+ strbuf_release(&buf_in);
+}
+
+int cmd__zlib(int argc, const char **argv)
+{
+ git_zstream stream;
+
+ if (argc != 2)
+ usage(zlib_usage);
+
+ memset(&stream, 0, sizeof(stream));
+
+ if (!strcmp(argv[1], "inflate")) {
+ git_inflate_init(&stream);
+ do_zlib(&stream, git_inflate, 0, 1);
+ git_inflate_end(&stream);
+ } else if (!strcmp(argv[1], "deflate")) {
+ git_deflate_init(&stream, Z_DEFAULT_COMPRESSION);
+ do_zlib(&stream, git_deflate, 0, 1);
+ git_deflate_end(&stream);
+ } else {
+ error("unknown mode: %s", argv[1]);
+ usage(zlib_usage);
+ }
+
+ return 0;
+}
diff --git a/t/lib-loose.sh b/t/lib-loose.sh
new file mode 100644
index 0000000000..3613631eaf
--- /dev/null
+++ b/t/lib-loose.sh
@@ -0,0 +1,30 @@
+# Support routines for hand-crafting loose objects.
+
+# Write a loose object into the odb at $1, with object type $2 and contents
+# from stdin. Writes the oid to stdout. Example:
+#
+# oid=$(echo foo | loose_obj .git/objects blob)
+#
+loose_obj () {
+ cat >tmp_loose.content &&
+ size=$(wc -c <tmp_loose.content) &&
+ {
+ # Do not quote $size here; we want the shell
+ # to strip whitespace that "wc" adds on some platforms.
+ printf "%s %s\0" "$2" $size &&
+ cat tmp_loose.content
+ } >tmp_loose.raw &&
+
+ oid=$(test-tool $test_hash_algo <tmp_loose.raw) &&
+ suffix=${oid#??} &&
+ prefix=${oid%$suffix} &&
+ dir=$1/$prefix &&
+ file=$dir/$suffix &&
+
+ test-tool zlib deflate <tmp_loose.raw >tmp_loose.zlib &&
+ mkdir -p "$dir" &&
+ mv tmp_loose.zlib "$file" &&
+
+ rm tmp_loose.raw tmp_loose.content &&
+ echo "$oid"
+}
diff --git a/t/meson.build b/t/meson.build
index b09c0becb8..fcfc1c2c2b 100644
--- a/t/meson.build
+++ b/t/meson.build
@@ -501,6 +501,7 @@ integration_tests = [
't4068-diff-symmetric-merge-base.sh',
't4069-remerge-diff.sh',
't4070-diff-pairs.sh',
+ 't4071-diff-minimal.sh',
't4100-apply-stat.sh',
't4101-apply-nonl.sh',
't4102-apply-rename.sh',
@@ -788,6 +789,7 @@ integration_tests = [
't6134-pathspec-in-submodule.sh',
't6135-pathspec-with-attrs.sh',
't6136-pathspec-in-bare.sh',
+ 't6137-pathspec-wildcards-literal.sh',
't6200-fmt-merge-msg.sh',
't6300-for-each-ref.sh',
't6301-for-each-ref-errors.sh',
diff --git a/t/t1001-read-tree-m-2way.sh b/t/t1001-read-tree-m-2way.sh
index 4a88bb9ef0..2e8d9384e1 100755
--- a/t/t1001-read-tree-m-2way.sh
+++ b/t/t1001-read-tree-m-2way.sh
@@ -362,7 +362,7 @@ test_expect_success 'a/b (untracked) vs a case setup.' '
test_expect_success 'a/b (untracked) vs a, plus c/d case test.' '
read_tree_u_must_fail -u -m "$treeH" "$treeM" &&
git ls-files --stage &&
- test -f a/b
+ test_path_is_file a/b
'
test_expect_success 'read-tree supports the super-prefix' '
diff --git a/t/t1006-cat-file.sh b/t/t1006-cat-file.sh
index ce8b27bf54..317da6869c 100755
--- a/t/t1006-cat-file.sh
+++ b/t/t1006-cat-file.sh
@@ -3,6 +3,7 @@
test_description='git cat-file'
. ./test-lib.sh
+. "$TEST_DIRECTORY/lib-loose.sh"
test_cmdmode_usage () {
test_expect_code 129 "$@" 2>err &&
@@ -136,18 +137,6 @@ $content"
test_cmp expect actual
'
- test_expect_success "Type of $type is correct using --allow-unknown-type" '
- echo $type >expect &&
- git cat-file -t --allow-unknown-type $oid >actual &&
- test_cmp expect actual
- '
-
- test_expect_success "Size of $type is correct using --allow-unknown-type" '
- echo $size >expect &&
- git cat-file -s --allow-unknown-type $oid >actual &&
- test_cmp expect actual
- '
-
test -z "$content" ||
test_expect_success "Content of $type is correct" '
echo_without_newline "$content" >expect &&
@@ -669,103 +658,75 @@ test_expect_success 'setup bogus data' '
bogus_short_type="bogus" &&
bogus_short_content="bogus" &&
bogus_short_size=$(strlen "$bogus_short_content") &&
- bogus_short_oid=$(echo_without_newline "$bogus_short_content" | git hash-object -t $bogus_short_type --literally -w --stdin) &&
+ bogus_short_oid=$(echo_without_newline "$bogus_short_content" | loose_obj .git/objects $bogus_short_type) &&
bogus_long_type="abcdefghijklmnopqrstuvwxyz1234679" &&
bogus_long_content="bogus" &&
bogus_long_size=$(strlen "$bogus_long_content") &&
- bogus_long_oid=$(echo_without_newline "$bogus_long_content" | git hash-object -t $bogus_long_type --literally -w --stdin)
+ bogus_long_oid=$(echo_without_newline "$bogus_long_content" | loose_obj .git/objects $bogus_long_type)
'
-for arg1 in '' --allow-unknown-type
+for arg1 in -s -t -p
do
- for arg2 in -s -t -p
- do
- if test "$arg1" = "--allow-unknown-type" && test "$arg2" = "-p"
- then
- continue
- fi
+ test_expect_success "cat-file $arg1 error on bogus short OID" '
+ cat >expect <<-\EOF &&
+ fatal: invalid object type
+ EOF
+ test_must_fail git cat-file $arg1 $bogus_short_oid >out 2>actual &&
+ test_must_be_empty out &&
+ test_cmp expect actual
+ '
- test_expect_success "cat-file $arg1 $arg2 error on bogus short OID" '
- cat >expect <<-\EOF &&
- fatal: invalid object type
+ test_expect_success "cat-file $arg1 error on bogus full OID" '
+ if test "$arg1" = "-p"
+ then
+ cat >expect <<-EOF
+ error: header for $bogus_long_oid too long, exceeds 32 bytes
+ fatal: Not a valid object name $bogus_long_oid
+ EOF
+ else
+ cat >expect <<-EOF
+ error: header for $bogus_long_oid too long, exceeds 32 bytes
+ fatal: git cat-file: could not get object info
EOF
+ fi &&
- if test "$arg1" = "--allow-unknown-type"
- then
- git cat-file $arg1 $arg2 $bogus_short_oid
- else
- test_must_fail git cat-file $arg1 $arg2 $bogus_short_oid >out 2>actual &&
- test_must_be_empty out &&
- test_cmp expect actual
- fi
- '
+ test_must_fail git cat-file $arg1 $bogus_long_oid >out 2>actual &&
+ test_must_be_empty out &&
+ test_cmp expect actual
+ '
- test_expect_success "cat-file $arg1 $arg2 error on bogus full OID" '
- if test "$arg2" = "-p"
- then
- cat >expect <<-EOF
- error: header for $bogus_long_oid too long, exceeds 32 bytes
- fatal: Not a valid object name $bogus_long_oid
- EOF
- else
- cat >expect <<-EOF
- error: header for $bogus_long_oid too long, exceeds 32 bytes
- fatal: git cat-file: could not get object info
- EOF
- fi &&
-
- if test "$arg1" = "--allow-unknown-type"
- then
- git cat-file $arg1 $arg2 $bogus_short_oid
- else
- test_must_fail git cat-file $arg1 $arg2 $bogus_long_oid >out 2>actual &&
- test_must_be_empty out &&
- test_cmp expect actual
- fi
- '
+ test_expect_success "cat-file $arg1 error on missing short OID" '
+ cat >expect.err <<-EOF &&
+ fatal: Not a valid object name $(test_oid deadbeef_short)
+ EOF
+ test_must_fail git cat-file $arg1 $(test_oid deadbeef_short) >out 2>err.actual &&
+ test_must_be_empty out &&
+ test_cmp expect.err err.actual
+ '
- test_expect_success "cat-file $arg1 $arg2 error on missing short OID" '
- cat >expect.err <<-EOF &&
- fatal: Not a valid object name $(test_oid deadbeef_short)
+ test_expect_success "cat-file $arg1 error on missing full OID" '
+ if test "$arg1" = "-p"
+ then
+ cat >expect.err <<-EOF
+ fatal: Not a valid object name $(test_oid deadbeef)
EOF
- test_must_fail git cat-file $arg1 $arg2 $(test_oid deadbeef_short) >out 2>err.actual &&
- test_must_be_empty out &&
- test_cmp expect.err err.actual
- '
-
- test_expect_success "cat-file $arg1 $arg2 error on missing full OID" '
- if test "$arg2" = "-p"
- then
- cat >expect.err <<-EOF
- fatal: Not a valid object name $(test_oid deadbeef)
- EOF
- else
- cat >expect.err <<-\EOF
- fatal: git cat-file: could not get object info
- EOF
- fi &&
- test_must_fail git cat-file $arg1 $arg2 $(test_oid deadbeef) >out 2>err.actual &&
- test_must_be_empty out &&
- test_cmp expect.err err.actual
- '
- done
+ else
+ cat >expect.err <<-\EOF
+ fatal: git cat-file: could not get object info
+ EOF
+ fi &&
+ test_must_fail git cat-file $arg1 $(test_oid deadbeef) >out 2>err.actual &&
+ test_must_be_empty out &&
+ test_cmp expect.err err.actual
+ '
done
-test_expect_success '-e is OK with a broken object without --allow-unknown-type' '
+test_expect_success '-e is OK with a broken object' '
git cat-file -e $bogus_short_oid
'
-test_expect_success '-e can not be combined with --allow-unknown-type' '
- test_expect_code 128 git cat-file -e --allow-unknown-type $bogus_short_oid
-'
-
-test_expect_success '-p cannot print a broken object even with --allow-unknown-type' '
- test_must_fail git cat-file -p $bogus_short_oid &&
- test_expect_code 128 git cat-file -p --allow-unknown-type $bogus_short_oid
-'
-
test_expect_success '<type> <hash> does not work with objects of broken types' '
cat >err.expect <<-\EOF &&
fatal: invalid object type "bogus"
@@ -788,60 +749,8 @@ test_expect_success 'broken types combined with --batch and --batch-check' '
test_cmp err.expect err.actual
'
-test_expect_success 'the --batch and --batch-check options do not combine with --allow-unknown-type' '
- test_expect_code 128 git cat-file --batch --allow-unknown-type <bogus-oid &&
- test_expect_code 128 git cat-file --batch-check --allow-unknown-type <bogus-oid
-'
-
-test_expect_success 'the --allow-unknown-type option does not consider replacement refs' '
- cat >expect <<-EOF &&
- $bogus_short_type
- EOF
- git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
- test_cmp expect actual &&
-
- # Create it manually, as "git replace" will die on bogus
- # types.
- head=$(git rev-parse --verify HEAD) &&
- test_when_finished "test-tool ref-store main delete-refs 0 msg refs/replace/$bogus_short_oid" &&
- test-tool ref-store main update-ref msg "refs/replace/$bogus_short_oid" $head $ZERO_OID REF_SKIP_OID_VERIFICATION &&
-
- cat >expect <<-EOF &&
- commit
- EOF
- git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
- test_cmp expect actual
-'
-
-test_expect_success "Type of broken object is correct" '
- echo $bogus_short_type >expect &&
- git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
- test_cmp expect actual
-'
-
-test_expect_success "Size of broken object is correct" '
- echo $bogus_short_size >expect &&
- git cat-file -s --allow-unknown-type $bogus_short_oid >actual &&
- test_cmp expect actual
-'
-
-test_expect_success 'clean up broken object' '
- rm .git/objects/$(test_oid_to_path $bogus_short_oid)
-'
-
-test_expect_success "Type of broken object is correct when type is large" '
- echo $bogus_long_type >expect &&
- git cat-file -t --allow-unknown-type $bogus_long_oid >actual &&
- test_cmp expect actual
-'
-
-test_expect_success "Size of large broken object is correct when type is large" '
- echo $bogus_long_size >expect &&
- git cat-file -s --allow-unknown-type $bogus_long_oid >actual &&
- test_cmp expect actual
-'
-
-test_expect_success 'clean up broken object' '
+test_expect_success 'clean up broken objects' '
+ rm .git/objects/$(test_oid_to_path $bogus_short_oid) &&
rm .git/objects/$(test_oid_to_path $bogus_long_oid)
'
@@ -903,25 +812,6 @@ test_expect_success 'cat-file -t and -s on corrupt loose object' '
)
'
-test_expect_success 'truncated object with --allow-unknown-type' - <<\EOT
- objtype='a really long type name that exceeds the 32-byte limit' &&
- blob=$(git hash-object -w --literally -t "$objtype" /dev/null) &&
- objpath=.git/objects/$(test_oid_to_path "$blob") &&
-
- # We want to truncate the object far enough in that we don't hit the
- # end while inflating the first 32 bytes (since we want to have to dig
- # for the trailing NUL of the header). But we don't want to go too far,
- # since our header isn't very big. And of course we are counting
- # deflated zlib bytes in the on-disk file, so it's a bit of a guess.
- # Empirically 50 seems to work.
- mv "$objpath" obj.bak &&
- test_when_finished 'mv obj.bak "$objpath"' &&
- test_copy_bytes 50 <obj.bak >"$objpath" &&
-
- test_must_fail git cat-file --allow-unknown-type -t $blob 2>err &&
- test_grep "unable to unpack $blob header" err
-EOT
-
test_expect_success 'object reading handles zlib dictionary' - <<\EOT
echo 'content that will be recompressed' >file &&
blob=$(git hash-object -w file) &&
diff --git a/t/t1007-hash-object.sh b/t/t1007-hash-object.sh
index b3cf53ff8c..dbbe9fb0d4 100755
--- a/t/t1007-hash-object.sh
+++ b/t/t1007-hash-object.sh
@@ -248,15 +248,8 @@ test_expect_success 'hash-object complains about truncated type name' '
test_must_fail git hash-object -t bl --stdin </dev/null
'
-test_expect_success '--literally' '
- t=1234567890 &&
- echo example | git hash-object -t $t --literally --stdin
-'
-
-test_expect_success '--literally with extra-long type' '
- t=12345678901234567890123456789012345678901234567890 &&
- t="$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t$t" &&
- echo example | git hash-object -t $t --literally --stdin
+test_expect_success '--literally complains about non-standard types' '
+ test_must_fail git hash-object -t bogus --literally --stdin
'
test_expect_success '--stdin outside of repository (uses SHA-1)' '
diff --git a/t/t1450-fsck.sh b/t/t1450-fsck.sh
index 0105045376..5ae86c42be 100755
--- a/t/t1450-fsck.sh
+++ b/t/t1450-fsck.sh
@@ -7,6 +7,7 @@ test_description='git fsck random collection of tests
'
. ./test-lib.sh
+. "$TEST_DIRECTORY/lib-loose.sh"
test_expect_success setup '
git config gc.auto 0 &&
@@ -71,30 +72,6 @@ test_expect_success 'object with hash mismatch' '
)
'
-test_expect_success 'object with hash and type mismatch' '
- git init --bare hash-type-mismatch &&
- (
- cd hash-type-mismatch &&
-
- oid=$(echo blob | git hash-object -w --stdin -t garbage --literally) &&
- oldoid=$oid &&
- old=$(test_oid_to_path "$oid") &&
- new=$(dirname $old)/$(test_oid ff_2) &&
- oid="$(dirname $new)$(basename $new)" &&
-
- mv objects/$old objects/$new &&
- git update-index --add --cacheinfo 100644 $oid foo &&
- tree=$(git write-tree) &&
- cmt=$(echo bogus | git commit-tree $tree) &&
- git update-ref refs/heads/bogus $cmt &&
-
-
- test_must_fail git fsck 2>out &&
- grep "^error: $oldoid: hash-path mismatch, found at: .*$new" out &&
- grep "^error: $oldoid: object is of unknown type '"'"'garbage'"'"'" out
- )
-'
-
test_expect_success 'zlib corrupt loose object output ' '
git init --bare corrupt-loose-output &&
(
@@ -997,12 +974,13 @@ test_expect_success 'fsck error and recovery on invalid object type' '
(
cd garbage-type &&
- garbage_blob=$(git hash-object --stdin -w -t garbage --literally </dev/null) &&
+ garbage_blob=$(loose_obj objects garbage </dev/null) &&
test_must_fail git fsck 2>err &&
grep -e "^error" -e "^fatal" err >errors &&
- test_line_count = 1 errors &&
- grep "$garbage_blob: object is of unknown type '"'"'garbage'"'"':" err
+ test_line_count = 2 errors &&
+ test_grep "unable to parse type from header .garbage" err &&
+ test_grep "$garbage_blob: object corrupt or missing:" err
)
'
diff --git a/t/t1512-rev-parse-disambiguation.sh b/t/t1512-rev-parse-disambiguation.sh
index 70f1e0a998..1a380a4184 100755
--- a/t/t1512-rev-parse-disambiguation.sh
+++ b/t/t1512-rev-parse-disambiguation.sh
@@ -24,6 +24,7 @@ GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
. ./test-lib.sh
+. "$TEST_DIRECTORY/lib-loose.sh"
test_cmp_failed_rev_parse () {
dir=$1
@@ -67,8 +68,8 @@ test_expect_success 'ambiguous loose bad object parsed as OBJ_BAD' '
cd blob.bad &&
# Both have the prefix "bad0"
- echo xyzfaowcoh | git hash-object -t bad -w --stdin --literally &&
- echo xyzhjpyvwl | git hash-object -t bad -w --stdin --literally
+ echo xyzfaowcoh | loose_obj objects bad &&
+ echo xyzhjpyvwl | loose_obj objects bad
) &&
test_cmp_failed_rev_parse blob.bad bad0 <<-\EOF
diff --git a/t/t3430-rebase-merges.sh b/t/t3430-rebase-merges.sh
index b84d68c4b9..ff81adab8c 100755
--- a/t/t3430-rebase-merges.sh
+++ b/t/t3430-rebase-merges.sh
@@ -86,7 +86,7 @@ test_expect_success 'create completely different structure' '
test_config sequence.editor \""$PWD"/replace-editor.sh\" &&
test_tick &&
git rebase -i -r A main &&
- test_cmp_graph <<-\EOF
+ test_cmp_graph <<-\EOF &&
* Merge the topic branch '\''onebranch'\''
|\
| * D
@@ -99,6 +99,15 @@ test_expect_success 'create completely different structure' '
|/
* A
EOF
+
+ head="$(git show-ref --verify -s --abbrev HEAD)" &&
+ cat >expect <<-EOF &&
+ $head HEAD@{0}: rebase (finish): returning to refs/heads/main
+ $head HEAD@{1}: rebase (merge): Merge the topic branch ${SQ}onebranch${SQ}
+ EOF
+
+ git reflog -n2 HEAD >actual &&
+ test_cmp expect actual
'
test_expect_success 'generate correct todo list' '
diff --git a/t/t4018/bash-bashism-style-complete-line-capture b/t/t4018/bash-bashism-style-complete-line-capture
new file mode 100644
index 0000000000..070b979fa6
--- /dev/null
+++ b/t/t4018/bash-bashism-style-complete-line-capture
@@ -0,0 +1,4 @@
+function myfunc # RIGHT
+{
+ echo 'ChangeMe'
+}
diff --git a/t/t4018/bash-posix-style-complete-line-capture b/t/t4018/bash-posix-style-complete-line-capture
new file mode 100644
index 0000000000..b56942f322
--- /dev/null
+++ b/t/t4018/bash-posix-style-complete-line-capture
@@ -0,0 +1,4 @@
+func() { # RIGHT
+
+ ChangeMe
+}
diff --git a/t/t4018/bash-posix-style-single-command-function b/t/t4018/bash-posix-style-single-command-function
new file mode 100644
index 0000000000..398ae1c5d2
--- /dev/null
+++ b/t/t4018/bash-posix-style-single-command-function
@@ -0,0 +1,3 @@
+RIGHT() echo "hello"
+
+ ChangeMe
diff --git a/t/t4034-diff-words.sh b/t/t4034-diff-words.sh
index f51d3557f1..0be647c2fb 100755
--- a/t/t4034-diff-words.sh
+++ b/t/t4034-diff-words.sh
@@ -320,6 +320,7 @@ test_expect_success 'unset default driver' '
test_language_driver ada
test_language_driver bibtex
+test_language_driver bash
test_language_driver cpp
test_language_driver csharp
test_language_driver css
diff --git a/t/t4034/bash/expect b/t/t4034/bash/expect
new file mode 100644
index 0000000000..1864ab25dc
--- /dev/null
+++ b/t/t4034/bash/expect
@@ -0,0 +1,36 @@
+<BOLD>diff --git a/pre b/post<RESET>
+<BOLD>index 09ac008..60ba6a2 100644<RESET>
+<BOLD>--- a/pre<RESET>
+<BOLD>+++ b/post<RESET>
+<CYAN>@@ -1,31 +1,31 @@<RESET>
+<RED>my_var<RESET><GREEN>new_var<RESET>=10
+x=<RED>123<RESET><GREEN>456<RESET>
+echo <RED>$1<RESET><GREEN>$2<RESET>
+echo <RED>$USER<RESET><GREEN>$USERNAME<RESET>
+${<RED>HOME<RESET><GREEN>HOMEDIR<RESET>}
+((a<RED>+<RESET><GREEN>+=<RESET>b))
+((a<RED>*<RESET><GREEN>*=<RESET>b))
+((a<RED>/<RESET><GREEN>/=<RESET>b))
+((a<RED>%<RESET><GREEN>%=<RESET>b))
+((a<RED>|<RESET><GREEN>|=<RESET>b))
+((a<RED>^<RESET><GREEN>^=<RESET>b))
+((a<RED>=<RESET><GREEN>==<RESET>b))
+((a<RED>!<RESET><GREEN>!=<RESET>b))
+((a<RED><<RESET><GREEN><=<RESET>b))
+((a<RED>><RESET><GREEN>>=<RESET>b))
+$((a<RED><<RESET><GREEN><<<RESET>b))
+$((a<RED>><RESET><GREEN>>><RESET>b))
+$((a<RED>&<RESET><GREEN>&&<RESET>b))
+$((a<RED>|<RESET><GREEN>||<RESET>b))
+${a<RED>:<RESET><GREEN>:-<RESET>b}
+${a<RED>:<RESET><GREEN>:=<RESET>b}
+${a<RED>:<RESET><GREEN>:+<RESET>b}
+${a<RED>:<RESET><GREEN>:?<RESET>b}
+${a<RED>#<RESET><GREEN>##<RESET>*/}
+${a<RED>%<RESET><GREEN>%%<RESET>.*}
+${a<RED>^<RESET><GREEN>^^<RESET>}
+${a<RED>,<RESET><GREEN>,,<RESET>}
+${<GREEN>!<RESET>a}
+${a[<RED>*<RESET><GREEN>@<RESET>]}
+ls <RED>-a<RESET><GREEN>-x<RESET>
+ls <RED>--all<RESET><GREEN>--color<RESET>
diff --git a/t/t4034/bash/post b/t/t4034/bash/post
new file mode 100644
index 0000000000..2bbee8936d
--- /dev/null
+++ b/t/t4034/bash/post
@@ -0,0 +1,31 @@
+new_var=10
+x=456
+echo $2
+echo $USERNAME
+${HOMEDIR}
+((a+=b))
+((a*=b))
+((a/=b))
+((a%=b))
+((a|=b))
+((a^=b))
+((a==b))
+((a!=b))
+((a<=b))
+((a>=b))
+$((a<<b))
+$((a>>b))
+$((a&&b))
+$((a||b))
+${a:-b}
+${a:=b}
+${a:+b}
+${a:?b}
+${a##*/}
+${a%%.*}
+${a^^}
+${a,,}
+${!a}
+${a[@]}
+ls -x
+ls --color
diff --git a/t/t4034/bash/pre b/t/t4034/bash/pre
new file mode 100644
index 0000000000..8d22039c40
--- /dev/null
+++ b/t/t4034/bash/pre
@@ -0,0 +1,31 @@
+my_var=10
+x=123
+echo $1
+echo $USER
+${HOME}
+((a+b))
+((a*b))
+((a/b))
+((a%b))
+((a|b))
+((a^b))
+((a=b))
+((a!b))
+((a<b))
+((a>b))
+$((a<b))
+$((a>b))
+$((a&b))
+$((a|b))
+${a:b}
+${a:b}
+${a:b}
+${a:b}
+${a#*/}
+${a%.*}
+${a^}
+${a,}
+${a}
+${a[*]}
+ls -a
+ls --all
diff --git a/t/t4071-diff-minimal.sh b/t/t4071-diff-minimal.sh
new file mode 100755
index 0000000000..4c484dadfb
--- /dev/null
+++ b/t/t4071-diff-minimal.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+test_description='minimal diff algorithm'
+
+. ./test-lib.sh
+
+test_expect_success 'minimal diff should not mark changes between changed lines' '
+ test_write_lines x x x x >pre &&
+ test_write_lines x x x A B C D x E F G >post &&
+ test_expect_code 1 git diff --no-index --minimal pre post >diff &&
+ test_grep ! ^[+-]x diff
+'
+
+test_done
diff --git a/t/t5309-pack-delta-cycles.sh b/t/t5309-pack-delta-cycles.sh
index 60fc710bac..6b03675d91 100755
--- a/t/t5309-pack-delta-cycles.sh
+++ b/t/t5309-pack-delta-cycles.sh
@@ -60,7 +60,10 @@ test_expect_success 'index-pack detects REF_DELTA cycles' '
test_expect_success 'failover to an object in another pack' '
clear_packs &&
git index-pack --stdin <ab.pack &&
- test_must_fail git index-pack --stdin --fix-thin <cycle.pack
+
+ # This cycle does not fail since the existence of A & B in
+ # the repo allows us to resolve the cycle.
+ git index-pack --stdin --fix-thin <cycle.pack
'
test_expect_success 'failover to a duplicate object in the same pack' '
@@ -72,7 +75,34 @@ test_expect_success 'failover to a duplicate object in the same pack' '
pack_obj $A
} >recoverable.pack &&
pack_trailer recoverable.pack &&
- test_must_fail git index-pack --fix-thin --stdin <recoverable.pack
+
+ # This cycle does not fail since the existence of a full copy
+ # of A in the pack allows us to resolve the cycle.
+ git index-pack --fix-thin --stdin <recoverable.pack
+'
+
+test_expect_success 'index-pack works with thin pack A->B->C with B on disk' '
+ git init server &&
+ (
+ cd server &&
+ test_commit_bulk 4
+ ) &&
+
+ A=$(git -C server rev-parse HEAD^{tree}) &&
+ B=$(git -C server rev-parse HEAD~1^{tree}) &&
+ C=$(git -C server rev-parse HEAD~2^{tree}) &&
+ git -C server reset --hard HEAD~1 &&
+
+ test-tool -C server pack-deltas --num-objects=2 >thin.pack <<-EOF &&
+ REF_DELTA $A $B
+ REF_DELTA $B $C
+ EOF
+
+ git clone "file://$(pwd)/server" client &&
+ (
+ cd client &&
+ git index-pack --fix-thin --stdin <../thin.pack
+ )
'
test_done
diff --git a/t/t5558-clone-bundle-uri.sh b/t/t5558-clone-bundle-uri.sh
index 3816ed5058..9b211a626b 100755
--- a/t/t5558-clone-bundle-uri.sh
+++ b/t/t5558-clone-bundle-uri.sh
@@ -58,7 +58,7 @@ test_expect_success 'create bundle' '
test_expect_success 'clone with path bundle' '
git clone --bundle-uri="clone-from/B.bundle" \
clone-from clone-path &&
- git -C clone-path rev-parse refs/bundles/topic >actual &&
+ git -C clone-path rev-parse refs/bundles/heads/topic >actual &&
git -C clone-from rev-parse topic >expect &&
test_cmp expect actual
'
@@ -68,9 +68,9 @@ test_expect_success 'clone with bundle that has bad header' '
git clone --bundle-uri="clone-from/bad-header.bundle" \
clone-from clone-bad-header 2>err &&
commit_b=$(git -C clone-from rev-parse B) &&
- test_grep "trying to write ref '\''refs/bundles/topic'\'' with nonexistent object $commit_b" err &&
+ test_grep "trying to write ref '\''refs/bundles/heads/topic'\'' with nonexistent object $commit_b" err &&
git -C clone-bad-header for-each-ref --format="%(refname)" >refs &&
- test_grep ! "refs/bundles/" refs
+ test_grep ! "refs/bundles/heads/" refs
'
test_expect_success 'clone with bundle that has bad object' '
@@ -78,8 +78,8 @@ test_expect_success 'clone with bundle that has bad object' '
git clone --bundle-uri="clone-from/bad-object.bundle" \
clone-from clone-bad-object-no-fsck &&
git -C clone-bad-object-no-fsck for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
- test_write_lines refs/bundles/bad >expect &&
+ grep "refs/bundles/heads/" refs >actual &&
+ test_write_lines refs/bundles/heads/bad >expect &&
test_cmp expect actual &&
# Unbundle fails with fsckObjects set true, but clone can still proceed.
@@ -87,14 +87,14 @@ test_expect_success 'clone with bundle that has bad object' '
clone-from clone-bad-object-fsck 2>err &&
test_grep "missingEmail" err &&
git -C clone-bad-object-fsck for-each-ref --format="%(refname)" >refs &&
- test_grep ! "refs/bundles/" refs
+ test_grep ! "refs/bundles/heads/" refs
'
test_expect_success 'clone with path bundle and non-default hash' '
test_when_finished "rm -rf clone-path-non-default-hash" &&
GIT_DEFAULT_HASH=sha256 git clone --bundle-uri="clone-from/B.bundle" \
clone-from clone-path-non-default-hash &&
- git -C clone-path-non-default-hash rev-parse refs/bundles/topic >actual &&
+ git -C clone-path-non-default-hash rev-parse refs/bundles/heads/topic >actual &&
git -C clone-from rev-parse topic >expect &&
test_cmp expect actual
'
@@ -102,11 +102,41 @@ test_expect_success 'clone with path bundle and non-default hash' '
test_expect_success 'clone with file:// bundle' '
git clone --bundle-uri="file://$(pwd)/clone-from/B.bundle" \
clone-from clone-file &&
- git -C clone-file rev-parse refs/bundles/topic >actual &&
+ git -C clone-file rev-parse refs/bundles/heads/topic >actual &&
git -C clone-from rev-parse topic >expect &&
test_cmp expect actual
'
+test_expect_success 'create bundle with tags' '
+ git init clone-from-tags &&
+ (
+ cd clone-from-tags &&
+ git checkout -b base &&
+ git checkout -b topic &&
+
+ test_commit A &&
+ git tag tag-A &&
+ git checkout -b base &&
+ git branch -d topic &&
+ test_commit B &&
+
+ git bundle create ALL.bundle --all &&
+ git bundle verify ALL.bundle
+ )
+'
+
+test_expect_success 'clone with tags bundle' '
+ git clone --bundle-uri="clone-from-tags/ALL.bundle" \
+ clone-from-tags clone-tags-path &&
+
+ git -C clone-from-tags for-each-ref --format="%(refname:lstrip=1)" \
+ >expect &&
+ git -C clone-tags-path for-each-ref --format="%(refname:lstrip=2)" \
+ refs/bundles >actual &&
+
+ test_cmp expect actual
+'
+
# To get interesting tests for bundle lists, we need to construct a
# somewhat-interesting commit history.
#
@@ -173,12 +203,12 @@ test_expect_success 'clone bundle list (file, no heuristic)' '
git -C clone-list-file cat-file --batch-check <oids &&
git -C clone-list-file for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/merge
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/merge
+ refs/bundles/heads/right
EOF
test_cmp expect actual
'
@@ -220,10 +250,10 @@ test_expect_success 'clone bundle list (file, all mode, some failures)' '
git -C clone-all-some cat-file --batch-check <oids &&
git -C clone-all-some for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
+ refs/bundles/heads/base
+ refs/bundles/heads/left
EOF
test_cmp expect actual
'
@@ -253,7 +283,7 @@ test_expect_success 'clone bundle list (file, all mode, all failures)' '
git -C clone-all-fail cat-file --batch-check <oids &&
git -C clone-all-fail for-each-ref --format="%(refname)" >refs &&
- ! grep "refs/bundles/" refs
+ ! grep "refs/bundles/heads/" refs
'
test_expect_success 'clone bundle list (file, any mode)' '
@@ -282,9 +312,9 @@ test_expect_success 'clone bundle list (file, any mode)' '
git -C clone-any-file cat-file --batch-check <oids &&
git -C clone-any-file for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
+ refs/bundles/heads/base
EOF
test_cmp expect actual
'
@@ -313,7 +343,7 @@ test_expect_success 'clone bundle list (file, any mode, all failures)' '
git -C clone-any-fail cat-file --batch-check <oids &&
git -C clone-any-fail for-each-ref --format="%(refname)" >refs &&
- ! grep "refs/bundles/" refs
+ ! grep "refs/bundles/heads/" refs
'
test_expect_success 'negotiation: bundle with part of wanted commits' '
@@ -322,10 +352,10 @@ test_expect_success 'negotiation: bundle with part of wanted commits' '
git clone --no-local --bundle-uri="clone-from/A.bundle" \
clone-from nego-bundle-part &&
git -C nego-bundle-part for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
- test_write_lines refs/bundles/topic >expect &&
+ grep "refs/bundles/heads/" refs >actual &&
+ test_write_lines refs/bundles/heads/topic >expect &&
test_cmp expect actual &&
- # Ensure that refs/bundles/topic are sent as "have".
+ # Ensure that refs/bundles/heads/topic are sent as "have".
tip=$(git -C clone-from rev-parse A) &&
test_grep "clone> have $tip" trace-packet.txt
'
@@ -337,8 +367,8 @@ test_expect_success 'negotiation: bundle with all wanted commits' '
--bundle-uri="clone-from/B.bundle" \
clone-from nego-bundle-all &&
git -C nego-bundle-all for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
- test_write_lines refs/bundles/topic >expect &&
+ grep "refs/bundles/heads/" refs >actual &&
+ test_write_lines refs/bundles/heads/topic >expect &&
test_cmp expect actual &&
# We already have all needed commits so no "want" needed.
test_grep ! "clone> want " trace-packet.txt
@@ -363,13 +393,13 @@ test_expect_success 'negotiation: bundle list (no heuristic)' '
clone-from nego-bundle-list-no-heuristic &&
git -C nego-bundle-list-no-heuristic for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
+ refs/bundles/heads/base
+ refs/bundles/heads/left
EOF
test_cmp expect actual &&
- tip=$(git -C nego-bundle-list-no-heuristic rev-parse refs/bundles/left) &&
+ tip=$(git -C nego-bundle-list-no-heuristic rev-parse refs/bundles/heads/left) &&
test_grep "clone> have $tip" trace-packet.txt
'
@@ -395,13 +425,13 @@ test_expect_success 'negotiation: bundle list (creationToken)' '
clone-from nego-bundle-list-heuristic &&
git -C nego-bundle-list-heuristic for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
+ refs/bundles/heads/base
+ refs/bundles/heads/left
EOF
test_cmp expect actual &&
- tip=$(git -C nego-bundle-list-heuristic rev-parse refs/bundles/left) &&
+ tip=$(git -C nego-bundle-list-heuristic rev-parse refs/bundles/heads/left) &&
test_grep "clone> have $tip" trace-packet.txt
'
@@ -428,10 +458,10 @@ test_expect_success 'negotiation: bundle list with all wanted commits' '
clone-from nego-bundle-list-all &&
git -C nego-bundle-list-all for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
+ refs/bundles/heads/base
+ refs/bundles/heads/left
EOF
test_cmp expect actual &&
# We already have all needed commits so no "want" needed.
@@ -465,7 +495,7 @@ test_expect_success 'clone HTTP bundle' '
git clone --bundle-uri="$HTTPD_URL/B.bundle" \
"$HTTPD_URL/smart/fetch.git" clone-http &&
- git -C clone-http rev-parse refs/bundles/topic >actual &&
+ git -C clone-http rev-parse refs/bundles/heads/topic >actual &&
git -C clone-from rev-parse topic >expect &&
test_cmp expect actual &&
@@ -476,7 +506,7 @@ test_expect_success 'clone HTTP bundle with non-default hash' '
test_when_finished "rm -rf clone-http-non-default-hash" &&
GIT_DEFAULT_HASH=sha256 git clone --bundle-uri="$HTTPD_URL/B.bundle" \
"$HTTPD_URL/smart/fetch.git" clone-http-non-default-hash &&
- git -C clone-http-non-default-hash rev-parse refs/bundles/topic >actual &&
+ git -C clone-http-non-default-hash rev-parse refs/bundles/heads/topic >actual &&
git -C clone-from rev-parse topic >expect &&
test_cmp expect actual
'
@@ -553,12 +583,12 @@ test_expect_success 'clone bundle list (HTTP, any mode)' '
git -C clone-any-http cat-file --batch-check <oids &&
git -C clone-list-file for-each-ref --format="%(refname)" >refs &&
- grep "refs/bundles/" refs >actual &&
+ grep "refs/bundles/heads/" refs >actual &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/merge
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/merge
+ refs/bundles/heads/right
EOF
test_cmp expect actual
'
@@ -641,9 +671,9 @@ test_expect_success 'clone incomplete bundle list (http, creationToken)' '
test_cmp expect actual &&
# We now have only one bundle ref.
- git -C clone-token-http for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C clone-token-http for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-\EOF &&
- refs/bundles/base
+ refs/bundles/heads/base
EOF
test_cmp expect refs &&
@@ -679,13 +709,13 @@ test_expect_success 'clone incomplete bundle list (http, creationToken)' '
test_cmp expect actual &&
# We now have all bundle refs.
- git -C clone-token-http for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C clone-token-http for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/merge
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/merge
+ refs/bundles/heads/right
EOF
test_cmp expect refs
'
@@ -721,9 +751,9 @@ test_expect_success 'http clone with bundle.heuristic creates fetch.bundleURI' '
test_cmp expect actual &&
# only received base ref from bundle-1
- git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-\EOF &&
- refs/bundles/base
+ refs/bundles/heads/base
EOF
test_cmp expect refs &&
@@ -749,10 +779,10 @@ test_expect_success 'http clone with bundle.heuristic creates fetch.bundleURI' '
test_cmp expect actual &&
# received left from bundle-2
- git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
+ refs/bundles/heads/base
+ refs/bundles/heads/left
EOF
test_cmp expect refs &&
@@ -795,12 +825,12 @@ test_expect_success 'http clone with bundle.heuristic creates fetch.bundleURI' '
# received merge ref from bundle-4, but right is missing
# because we did not download bundle-3.
- git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-http-4 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-\EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/merge
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/merge
EOF
test_cmp expect refs &&
@@ -862,7 +892,7 @@ test_expect_success 'creationToken heuristic with failed downloads (clone)' '
test_cmp expect actual &&
# All bundles failed to unbundle
- git -C download-1 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C download-1 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
test_must_be_empty refs &&
# Case 2: middle bundle does not exist, only two bundles can unbundle
@@ -909,10 +939,10 @@ test_expect_success 'creationToken heuristic with failed downloads (clone)' '
test_cmp expect actual &&
# bundle-1 and bundle-3 could unbundle, but bundle-4 could not
- git -C download-2 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C download-2 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-EOF &&
- refs/bundles/base
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/right
EOF
test_cmp expect refs &&
@@ -961,11 +991,11 @@ test_expect_success 'creationToken heuristic with failed downloads (clone)' '
test_cmp expect actual &&
# fake.bundle did not unbundle, but the others did.
- git -C download-3 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C download-3 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/right
EOF
test_cmp expect refs
'
@@ -1083,15 +1113,15 @@ test_expect_success 'creationToken heuristic with failed downloads (fetch)' '
test_cmp expect actual &&
# Check which bundles have unbundled by refs
- git -C fetch-1 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-1 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/lefter
- refs/bundles/merge
- refs/bundles/right
- refs/bundles/righter
- refs/bundles/top
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/lefter
+ refs/bundles/heads/merge
+ refs/bundles/heads/right
+ refs/bundles/heads/righter
+ refs/bundles/heads/top
EOF
test_cmp expect refs &&
@@ -1144,12 +1174,12 @@ test_expect_success 'creationToken heuristic with failed downloads (fetch)' '
test_cmp expect actual &&
# Check which bundles have unbundled by refs
- git -C fetch-2 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-2 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/merge
- refs/bundles/right
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/merge
+ refs/bundles/heads/right
EOF
test_cmp expect refs &&
@@ -1204,13 +1234,13 @@ test_expect_success 'creationToken heuristic with failed downloads (fetch)' '
test_cmp expect actual &&
# Check which bundles have unbundled by refs
- git -C fetch-3 for-each-ref --format="%(refname)" "refs/bundles/*" >refs &&
+ git -C fetch-3 for-each-ref --format="%(refname)" "refs/bundles/heads/*" >refs &&
cat >expect <<-EOF &&
- refs/bundles/base
- refs/bundles/left
- refs/bundles/lefter
- refs/bundles/right
- refs/bundles/righter
+ refs/bundles/heads/base
+ refs/bundles/heads/left
+ refs/bundles/heads/lefter
+ refs/bundles/heads/right
+ refs/bundles/heads/righter
EOF
test_cmp expect refs
'
diff --git a/t/t6011-rev-list-with-bad-commit.sh b/t/t6011-rev-list-with-bad-commit.sh
index b6f3344dbf..1dd1e50d21 100755
--- a/t/t6011-rev-list-with-bad-commit.sh
+++ b/t/t6011-rev-list-with-bad-commit.sh
@@ -38,6 +38,7 @@ test_expect_success 'verify number of revisions' \
test_expect_success 'corrupt second commit object' '
for p in .git/objects/pack/*.pack
do
+ chmod +w "$p" &&
sed "s/second commit/socond commit/" "$p" >"$p.munged" &&
mv "$p.munged" "$p" ||
return 1
diff --git a/t/t6137-pathspec-wildcards-literal.sh b/t/t6137-pathspec-wildcards-literal.sh
new file mode 100755
index 0000000000..20abad5667
--- /dev/null
+++ b/t/t6137-pathspec-wildcards-literal.sh
@@ -0,0 +1,429 @@
+#!/bin/sh
+test_description='test wildcards and literals with git add/commit (subshell style)'
+
+. ./test-lib.sh
+
+test_have_prereq FUNNYNAMES || {
+ skip_all='skipping: needs FUNNYNAMES (non-Windows only)'
+ test_done
+}
+
+prepare_test_files () {
+ for f in "*" "**" "?" "[abc]" "a" "f*" "f**" "f?z" "foo*bar" "hello?world" "hello_world"
+ do
+ >"$f" || return
+ done
+}
+
+test_expect_success 'add wildcard *' '
+ git init test-asterisk &&
+ (
+ cd test-asterisk &&
+ prepare_test_files &&
+ git add "*" &&
+ cat >expect <<-EOF &&
+ *
+ **
+ ?
+ [abc]
+ a
+ f*
+ f**
+ f?z
+ foo*bar
+ hello?world
+ hello_world
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal \*' '
+ git init test-asterisk-literal &&
+ (
+ cd test-asterisk-literal &&
+ prepare_test_files &&
+ git add "\*" &&
+ cat >expect <<-EOF &&
+ *
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard **' '
+ git init test-dstar &&
+ (
+ cd test-dstar &&
+ prepare_test_files &&
+ git add "**" &&
+ cat >expect <<-EOF &&
+ *
+ **
+ ?
+ [abc]
+ a
+ f*
+ f**
+ f?z
+ foo*bar
+ hello?world
+ hello_world
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard ?' '
+ git init test-qmark &&
+ (
+ cd test-qmark &&
+ prepare_test_files &&
+ git add "?" &&
+	cat >expect <<-\EOF &&
+ *
+ ?
+ a
+ EOF
+ git ls-files | sort >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard [abc]' '
+ git init test-brackets &&
+ (
+ cd test-brackets &&
+ prepare_test_files &&
+ git add "[abc]" &&
+	cat >expect <<-\EOF &&
+ [abc]
+ a
+ EOF
+ git ls-files | sort >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard f*' '
+ git init test-f-wild &&
+ (
+ cd test-f-wild &&
+ prepare_test_files &&
+ git add "f*" &&
+	cat >expect <<-\EOF &&
+ f*
+ f**
+ f?z
+ foo*bar
+ EOF
+ git ls-files | sort >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal f\*' '
+ git init test-f-lit &&
+ (
+ cd test-f-lit &&
+ prepare_test_files &&
+ git add "f\*" &&
+ cat >expect <<-\EOF &&
+ f*
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard f**' '
+ git init test-fdstar &&
+ (
+ cd test-fdstar &&
+ prepare_test_files &&
+ git add "f**" &&
+	cat >expect <<-\EOF &&
+ f*
+ f**
+ f?z
+ foo*bar
+ EOF
+ git ls-files | sort >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal f\*\*' '
+ git init test-fdstar-lit &&
+ (
+ cd test-fdstar-lit &&
+ prepare_test_files &&
+ git add "f\*\*" &&
+ cat >expect <<-\EOF &&
+ f**
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard f?z' '
+ git init test-fqz &&
+ (
+ cd test-fqz &&
+ prepare_test_files &&
+ git add "f?z" &&
+ cat >expect <<-\EOF &&
+ f?z
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal \?' '
+ git init test-q-lit &&
+ (
+ cd test-q-lit &&
+ prepare_test_files &&
+ git add "\?" &&
+ cat >expect <<-\EOF &&
+ ?
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard foo*bar' '
+ git init test-foobar &&
+ (
+ cd test-foobar &&
+ prepare_test_files &&
+ git add "foo*bar" &&
+ cat >expect <<-\EOF &&
+ foo*bar
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add wildcard hello?world' '
+ git init test-hellowild &&
+ (
+ cd test-hellowild &&
+ prepare_test_files &&
+ git add "hello?world" &&
+ cat >expect <<-\EOF &&
+ hello?world
+ hello_world
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal hello\?world' '
+ git init test-hellolit &&
+ (
+ cd test-hellolit &&
+ prepare_test_files &&
+ git add "hello\?world" &&
+ cat >expect <<-\EOF &&
+ hello?world
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'add literal [abc]' '
+ git init test-brackets-lit &&
+ (
+ cd test-brackets-lit &&
+ prepare_test_files &&
+ git add "\[abc\]" &&
+ cat >expect <<-\EOF &&
+ [abc]
+ EOF
+ git ls-files >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: wildcard *' '
+ git init test-c-asterisk &&
+ (
+ cd test-c-asterisk &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c1" -- "*" &&
+ cat >expect <<-EOF &&
+ *
+ **
+ ?
+ [abc]
+ a
+ f*
+ f**
+ f?z
+ foo*bar
+ hello?world
+ hello_world
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: literal *' '
+ git init test-c-asterisk-lit &&
+ (
+ cd test-c-asterisk-lit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c2" -- "\*" &&
+ cat >expect <<-EOF &&
+ *
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: wildcard f*' '
+ git init test-c-fwild &&
+ (
+ cd test-c-fwild &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c3" -- "f*" &&
+ cat >expect <<-EOF &&
+ f*
+ f**
+ f?z
+ foo*bar
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: literal f\*' '
+ git init test-c-flit &&
+ (
+ cd test-c-flit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c4" -- "f\*" &&
+ cat >expect <<-EOF &&
+ f*
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: wildcard pathspec limits commit' '
+ git init test-c-pathlimit &&
+ (
+ cd test-c-pathlimit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c5" -- "f**" &&
+ cat >expect <<-EOF &&
+ f*
+ f**
+ f?z
+ foo*bar
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: literal f\*\*' '
+ git init test-c-fdstar-lit &&
+ (
+ cd test-c-fdstar-lit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c6" -- "f\*\*" &&
+ cat >expect <<-EOF &&
+ f**
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: wildcard ?' '
+ git init test-c-qwild &&
+ (
+ cd test-c-qwild &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c7" -- "?" &&
+ cat >expect <<-EOF &&
+ *
+ ?
+ a
+ EOF
+ git ls-tree -r --name-only HEAD | sort >actual &&
+ sort expect >expect.sorted &&
+ test_cmp expect.sorted actual
+ )
+'
+
+test_expect_success 'commit: literal \?' '
+ git init test-c-qlit &&
+ (
+ cd test-c-qlit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c8" -- "\?" &&
+ cat >expect <<-EOF &&
+ ?
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'commit: wildcard hello?world' '
+ git init test-c-hellowild &&
+ (
+ cd test-c-hellowild &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c9" -- "hello?world" &&
+ cat >expect <<-EOF &&
+ hello?world
+ hello_world
+ EOF
+ git ls-tree -r --name-only HEAD | sort >actual &&
+ sort expect >expect.sorted &&
+ test_cmp expect.sorted actual
+ )
+'
+
+test_expect_success 'commit: literal hello\?world' '
+ git init test-c-hellolit &&
+ (
+ cd test-c-hellolit &&
+ prepare_test_files &&
+ git add . &&
+ git commit -m "c10" -- "hello\?world" &&
+ cat >expect <<-EOF &&
+ hello?world
+ EOF
+ git ls-tree -r --name-only HEAD >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_done
diff --git a/t/t7900-maintenance.sh b/t/t7900-maintenance.sh
index 9b82e11c10..8cf89e285f 100755
--- a/t/t7900-maintenance.sh
+++ b/t/t7900-maintenance.sh
@@ -493,6 +493,121 @@ test_expect_success 'reflog-expire task --auto only packs when exceeding limits'
test_subcommand git reflog expire --all <reflog-expire-auto.txt
'
+test_expect_worktree_prune () {
+ negate=
+ if test "$1" = "!"
+ then
+ negate="!"
+ shift
+ fi
+
+ rm -f "worktree-prune.txt" &&
+ GIT_TRACE2_EVENT="$(pwd)/worktree-prune.txt" "$@" &&
+ test_subcommand $negate git worktree prune --expire 3.months.ago <worktree-prune.txt
+}
+
+test_expect_success 'worktree-prune task without --auto always prunes' '
+ test_expect_worktree_prune git maintenance run --task=worktree-prune
+'
+
+test_expect_success 'worktree-prune task --auto only prunes with prunable worktree' '
+ test_expect_worktree_prune ! git maintenance run --auto --task=worktree-prune &&
+ mkdir .git/worktrees &&
+ : >.git/worktrees/abc &&
+ test_expect_worktree_prune git maintenance run --auto --task=worktree-prune
+'
+
+test_expect_success 'worktree-prune task with --auto honors maintenance.worktree-prune.auto' '
+ # A negative value should always prune.
+ test_expect_worktree_prune git -c maintenance.worktree-prune.auto=-1 maintenance run --auto --task=worktree-prune &&
+
+ mkdir .git/worktrees &&
+ : >.git/worktrees/first &&
+ : >.git/worktrees/second &&
+ : >.git/worktrees/third &&
+
+ # Zero should never prune.
+ test_expect_worktree_prune ! git -c maintenance.worktree-prune.auto=0 maintenance run --auto --task=worktree-prune &&
+ # A positive value should require at least this many prunable worktrees.
+ test_expect_worktree_prune ! git -c maintenance.worktree-prune.auto=4 maintenance run --auto --task=worktree-prune &&
+ test_expect_worktree_prune git -c maintenance.worktree-prune.auto=3 maintenance run --auto --task=worktree-prune
+'
+
+test_expect_success 'worktree-prune task with --auto honors maintenance.worktree-prune.auto' '
+ # A negative value should always prune.
+ test_expect_worktree_prune git -c maintenance.worktree-prune.auto=-1 maintenance run --auto --task=worktree-prune &&
+
+ mkdir .git/worktrees &&
+ : >.git/worktrees/first &&
+ : >.git/worktrees/second &&
+ : >.git/worktrees/third &&
+
+ # Zero should never prune.
+ test_expect_worktree_prune ! git -c maintenance.worktree-prune.auto=0 maintenance run --auto --task=worktree-prune &&
+ # A positive value should require at least this many prunable worktrees.
+ test_expect_worktree_prune ! git -c maintenance.worktree-prune.auto=4 maintenance run --auto --task=worktree-prune &&
+ test_expect_worktree_prune git -c maintenance.worktree-prune.auto=3 maintenance run --auto --task=worktree-prune
+'
+
+test_expect_success 'worktree-prune task honors gc.worktreePruneExpire' '
+ git worktree add worktree &&
+ rm -rf worktree &&
+
+ rm -f worktree-prune.txt &&
+ GIT_TRACE2_EVENT="$(pwd)/worktree-prune.txt" git -c gc.worktreePruneExpire=1.week.ago maintenance run --auto --task=worktree-prune &&
+ test_subcommand ! git worktree prune --expire 1.week.ago <worktree-prune.txt &&
+ test_path_is_dir .git/worktrees/worktree &&
+
+ rm -f worktree-prune.txt &&
+ GIT_TRACE2_EVENT="$(pwd)/worktree-prune.txt" git -c gc.worktreePruneExpire=now maintenance run --auto --task=worktree-prune &&
+ test_subcommand git worktree prune --expire now <worktree-prune.txt &&
+ test_path_is_missing .git/worktrees/worktree
+'
+
+test_expect_rerere_gc () {
+ negate=
+ if test "$1" = "!"
+ then
+ negate="!"
+ shift
+ fi
+
+ rm -f "rerere-gc.txt" &&
+ GIT_TRACE2_EVENT="$(pwd)/rerere-gc.txt" "$@" &&
+ test_subcommand $negate git rerere gc <rerere-gc.txt
+}
+
+test_expect_success 'rerere-gc task without --auto always collects garbage' '
+ test_expect_rerere_gc git maintenance run --task=rerere-gc
+'
+
+test_expect_success 'rerere-gc task with --auto only prunes with prunable entries' '
+ test_when_finished "rm -rf .git/rr-cache" &&
+ test_expect_rerere_gc ! git maintenance run --auto --task=rerere-gc &&
+ mkdir .git/rr-cache &&
+ test_expect_rerere_gc ! git maintenance run --auto --task=rerere-gc &&
+ : >.git/rr-cache/entry &&
+ test_expect_rerere_gc git maintenance run --auto --task=rerere-gc
+'
+
+test_expect_success 'rerere-gc task with --auto honors maintenance.rerere-gc.auto' '
+ test_when_finished "rm -rf .git/rr-cache" &&
+
+ # A negative value should always prune.
+ test_expect_rerere_gc git -c maintenance.rerere-gc.auto=-1 maintenance run --auto --task=rerere-gc &&
+
+ # A positive value prunes when there is at least one entry.
+ test_expect_rerere_gc ! git -c maintenance.rerere-gc.auto=9000 maintenance run --auto --task=rerere-gc &&
+ mkdir .git/rr-cache &&
+ test_expect_rerere_gc ! git -c maintenance.rerere-gc.auto=9000 maintenance run --auto --task=rerere-gc &&
+ : >.git/rr-cache/entry-1 &&
+ test_expect_rerere_gc git -c maintenance.rerere-gc.auto=9000 maintenance run --auto --task=rerere-gc &&
+
+ # Zero should never prune.
+ : >.git/rr-cache/entry-1 &&
+ test_expect_rerere_gc ! git -c maintenance.rerere-gc.auto=0 maintenance run --auto --task=rerere-gc
+'
+
test_expect_success '--auto and --schedule incompatible' '
test_must_fail git maintenance run --auto --schedule=daily 2>err &&
test_grep "at most one" err
diff --git a/t/t9210-scalar.sh b/t/t9210-scalar.sh
index a81662713e..bd6f0c40d2 100755
--- a/t/t9210-scalar.sh
+++ b/t/t9210-scalar.sh
@@ -108,7 +108,7 @@ test_expect_success 'scalar register warns when background maintenance fails' '
git init register-repo &&
GIT_TEST_MAINT_SCHEDULER="crontab:false,launchctl:false,schtasks:false" \
scalar register register-repo 2>err &&
- grep "could not turn on maintenance" err
+ grep "could not toggle maintenance" err
'
test_expect_success 'scalar unregister' '
@@ -129,6 +129,17 @@ test_expect_success 'scalar unregister' '
scalar unregister vanish
'
+test_expect_success 'scalar register --no-maintenance' '
+ git init register-no-maint &&
+ event_log="$(pwd)/no-maint.event" &&
+ GIT_TEST_MAINT_SCHEDULER="crontab:false,launchctl:false,schtasks:false" \
+ GIT_TRACE2_EVENT="$event_log" \
+ GIT_TRACE2_EVENT_DEPTH=100 \
+ scalar register --no-maintenance register-no-maint 2>err &&
+ test_must_be_empty err &&
+ test_subcommand ! git maintenance unregister --force <no-maint.event
+'
+
test_expect_success 'set up repository to clone' '
test_commit first &&
test_commit second &&
@@ -199,7 +210,18 @@ test_expect_success 'scalar reconfigure' '
GIT_TRACE2_EVENT="$(pwd)/reconfigure" scalar reconfigure -a &&
test_path_is_file one/src/cron.txt &&
test true = "$(git -C one/src config core.preloadIndex)" &&
- test_subcommand git maintenance start <reconfigure
+ test_subcommand git maintenance start <reconfigure &&
+ test_subcommand ! git maintenance unregister --force <reconfigure &&
+
+ GIT_TRACE2_EVENT="$(pwd)/reconfigure-maint-disable" \
+ scalar reconfigure -a --maintenance=disable &&
+ test_subcommand ! git maintenance start <reconfigure-maint-disable &&
+ test_subcommand git maintenance unregister --force <reconfigure-maint-disable &&
+
+ GIT_TRACE2_EVENT="$(pwd)/reconfigure-maint-keep" \
+ scalar reconfigure --maintenance=keep -a &&
+ test_subcommand ! git maintenance start <reconfigure-maint-keep &&
+ test_subcommand ! git maintenance unregister --force <reconfigure-maint-keep
'
test_expect_success 'scalar reconfigure --all with includeIf.onbranch' '
diff --git a/t/t9211-scalar-clone.sh b/t/t9211-scalar-clone.sh
index 01f71910f5..bfbf22a462 100755
--- a/t/t9211-scalar-clone.sh
+++ b/t/t9211-scalar-clone.sh
@@ -177,7 +177,16 @@ test_expect_success 'progress without tty' '
test_expect_success 'scalar clone warns when background maintenance fails' '
GIT_TEST_MAINT_SCHEDULER="crontab:false,launchctl:false,schtasks:false" \
scalar clone "file://$(pwd)/to-clone" maint-fail 2>err &&
- grep "could not turn on maintenance" err
+ grep "could not toggle maintenance" err
+'
+
+test_expect_success 'scalar clone --no-maintenance' '
+ GIT_TEST_MAINT_SCHEDULER="crontab:false,launchctl:false,schtasks:false" \
+ GIT_TRACE2_EVENT="$(pwd)/no-maint.event" \
+ GIT_TRACE2_EVENT_DEPTH=100 \
+ scalar clone --no-maintenance "file://$(pwd)/to-clone" no-maint 2>err &&
+ ! grep "could not toggle maintenance" err &&
+ test_subcommand ! git maintenance unregister --force <no-maint.event
'
test_expect_success '`scalar clone --no-src`' '
diff --git a/t/unit-tests/t-reftable-block.c b/t/unit-tests/t-reftable-block.c
index 7dbd93601c..52f1dae1c9 100644
--- a/t/unit-tests/t-reftable-block.c
+++ b/t/unit-tests/t-reftable-block.c
@@ -64,7 +64,8 @@ static void t_ref_block_read_write(void)
block_writer_release(&bw);
block_source_from_buf(&source ,&block_data);
- reftable_block_init(&block, &source, 0, header_off, block_size, REFTABLE_HASH_SIZE_SHA1);
+ reftable_block_init(&block, &source, 0, header_off, block_size,
+ REFTABLE_HASH_SIZE_SHA1, REFTABLE_BLOCK_TYPE_REF);
block_iter_init(&it, &block);
@@ -153,7 +154,8 @@ static void t_log_block_read_write(void)
block_writer_release(&bw);
block_source_from_buf(&source, &block_data);
- reftable_block_init(&block, &source, 0, header_off, block_size, REFTABLE_HASH_SIZE_SHA1);
+ reftable_block_init(&block, &source, 0, header_off, block_size,
+ REFTABLE_HASH_SIZE_SHA1, REFTABLE_BLOCK_TYPE_LOG);
block_iter_init(&it, &block);
@@ -245,7 +247,8 @@ static void t_obj_block_read_write(void)
block_writer_release(&bw);
block_source_from_buf(&source, &block_data);
- reftable_block_init(&block, &source, 0, header_off, block_size, REFTABLE_HASH_SIZE_SHA1);
+ reftable_block_init(&block, &source, 0, header_off, block_size,
+ REFTABLE_HASH_SIZE_SHA1, REFTABLE_BLOCK_TYPE_OBJ);
block_iter_init(&it, &block);
@@ -329,7 +332,8 @@ static void t_index_block_read_write(void)
block_writer_release(&bw);
block_source_from_buf(&source, &block_data);
- reftable_block_init(&block, &source, 0, header_off, block_size, REFTABLE_HASH_SIZE_SHA1);
+ reftable_block_init(&block, &source, 0, header_off, block_size,
+ REFTABLE_HASH_SIZE_SHA1, REFTABLE_BLOCK_TYPE_INDEX);
block_iter_init(&it, &block);
@@ -411,7 +415,8 @@ static void t_block_iterator(void)
check_int(err, >, 0);
block_source_from_buf(&source, &data);
- reftable_block_init(&block, &source, 0, 0, data.len, REFTABLE_HASH_SIZE_SHA1);
+ reftable_block_init(&block, &source, 0, 0, data.len,
+ REFTABLE_HASH_SIZE_SHA1, REFTABLE_BLOCK_TYPE_REF);
err = reftable_block_init_iterator(&block, &it);
check_int(err, ==, 0);
diff --git a/t/unit-tests/u-oidmap.c b/t/unit-tests/u-oidmap.c
index dc805b7e3c..b23af449f6 100644
--- a/t/unit-tests/u-oidmap.c
+++ b/t/unit-tests/u-oidmap.c
@@ -35,7 +35,7 @@ void test_oidmap__initialize(void)
void test_oidmap__cleanup(void)
{
- oidmap_free(&map, 1);
+ oidmap_clear(&map, 1);
}
void test_oidmap__replace(void)
diff --git a/tree-walk.h b/tree-walk.h
index aaea689f9a..29a55328bd 100644
--- a/tree-walk.h
+++ b/tree-walk.h
@@ -176,11 +176,14 @@ struct traverse_info {
};
/**
- * Find an entry in a tree given a pathname and the sha1 of a tree to
- * search. Returns 0 if the entry is found and -1 otherwise. The third
- * and fourth parameters are set to the entry's sha1 and mode respectively.
- */
-int get_tree_entry(struct repository *, const struct object_id *, const char *, struct object_id *, unsigned short *);
+ * Walk trees starting with "tree_oid" to find the entry for "name", and
+ * return the object name and the mode of the found entry via the
+ * "oid" and "mode" parameters. Return 0 if the entry is found, and -1
+ * otherwise.
+ */
+int get_tree_entry(struct repository *repo, const struct object_id *tree_oid,
+ const char *name, struct object_id *oid,
+ unsigned short *mode);
/**
* Generate the full pathname of a tree entry based from the root of the
diff --git a/upload-pack.c b/upload-pack.c
index 30e4630f3a..956da5b061 100644
--- a/upload-pack.c
+++ b/upload-pack.c
@@ -509,8 +509,7 @@ static int got_oid(struct upload_pack_data *data,
{
if (get_oid_hex(hex, oid))
die("git upload-pack: expected SHA1 object, got '%s'", hex);
- if (!repo_has_object_file_with_flags(the_repository, oid,
- OBJECT_INFO_QUICK | OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (!has_object(the_repository, oid, 0))
return -1;
return do_got_oid(data, oid);
}
diff --git a/userdiff.c b/userdiff.c
index da75625020..05776ccd10 100644
--- a/userdiff.c
+++ b/userdiff.c
@@ -59,20 +59,30 @@ PATTERNS("bash",
"("
"("
/* POSIX identifier with mandatory parentheses */
- "[a-zA-Z_][a-zA-Z0-9_]*[ \t]*\\([ \t]*\\))"
+ "([a-zA-Z_][a-zA-Z0-9_]*[ \t]*\\([ \t]*\\))"
"|"
/* Bashism identifier with optional parentheses */
- "(function[ \t]+[a-zA-Z_][a-zA-Z0-9_]*(([ \t]*\\([ \t]*\\))|([ \t]+))"
+ "(function[ \t]+[a-zA-Z_][a-zA-Z0-9_]*(([ \t]*\\([ \t]*\\))|([ \t]+)))"
")"
- /* Optional whitespace */
- "[ \t]*"
- /* Compound command starting with `{`, `(`, `((` or `[[` */
- "(\\{|\\(\\(?|\\[\\[)"
+ /* Everything after the function header is captured */
+ ".*$"
/* End of captured text */
")",
/* -- */
- /* Characters not in the default $IFS value */
- "[^ \t]+"),
+ /* Identifiers: variable and function names */
+ "[a-zA-Z_][a-zA-Z0-9_]*"
+ /* Shell variables: $VAR, ${VAR} */
+ "|\\$[a-zA-Z0-9_]+|\\$\\{"
+	/* Command list separators and redirection operators */
+ "|\\|\\||&&|<<|>>"
+ /* Operators ending in '=' (comparison + compound assignment) */
+ "|==|!=|<=|>=|[-+*/%&|^]="
+ /* Additional parameter expansion operators */
+ "|:=|:-|:\\+|:\\?|##|%%|\\^\\^|,,"
+ /* Command-line options (to avoid splitting -option) */
+ "|[-a-zA-Z0-9_]+"
+ /* Brackets and grouping symbols */
+ "|\\(|\\)|\\{|\\}|\\[|\\]"),
PATTERNS("bibtex",
"(@[a-zA-Z]{1,}[ \t]*\\{{0,1}[ \t]*[^ \t\"@',\\#}{~%]*).*$",
/* -- */
diff --git a/walker.c b/walker.c
index 4fedc19f34..b470d43e54 100644
--- a/walker.c
+++ b/walker.c
@@ -150,7 +150,8 @@ static int process(struct walker *walker, struct object *obj)
return 0;
obj->flags |= SEEN;
- if (repo_has_object_file(the_repository, &obj->oid)) {
+ if (has_object(the_repository, &obj->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
/* We already have it, so we should scan it now. */
obj->flags |= TO_SCAN;
}
diff --git a/wrapper.c b/wrapper.c
index 3c79778055..2f00d2ac87 100644
--- a/wrapper.c
+++ b/wrapper.c
@@ -737,7 +737,26 @@ int is_empty_or_missing_file(const char *filename)
int open_nofollow(const char *path, int flags)
{
#ifdef O_NOFOLLOW
- return open(path, flags | O_NOFOLLOW);
+ int ret = open(path, flags | O_NOFOLLOW);
+ /*
+ * NetBSD sets errno to EFTYPE when path is a symlink. The only other
+ * time this errno occurs when O_REGULAR is used. Since we don't use
+ * it anywhere we can avoid an lstat here. FreeBSD does the same with
+ * EMLINK.
+ */
+# ifdef __NetBSD__
+# define SYMLINK_ERRNO EFTYPE
+# elif defined(__FreeBSD__)
+# define SYMLINK_ERRNO EMLINK
+# endif
+# ifdef SYMLINK_ERRNO
+ if (ret < 0 && errno == SYMLINK_ERRNO) {
+ errno = ELOOP;
+ return -1;
+ }
+# undef SYMLINK_ERRNO
+# endif
+ return ret;
#else
struct stat st;
if (lstat(path, &st) < 0)
diff --git a/xdiff/xprepare.c b/xdiff/xprepare.c
index c84549f6c5..e1d4017b2d 100644
--- a/xdiff/xprepare.c
+++ b/xdiff/xprepare.c
@@ -368,6 +368,7 @@ static int xdl_cleanup_records(xdlclassifier_t *cf, xdfile_t *xdf1, xdfile_t *xd
xrecord_t **recs;
xdlclass_t *rcrec;
char *dis, *dis1, *dis2;
+ int need_min = !!(cf->flags & XDF_NEED_MINIMAL);
if (!XDL_CALLOC_ARRAY(dis, xdf1->nrec + xdf2->nrec + 2))
return -1;
@@ -379,7 +380,7 @@ static int xdl_cleanup_records(xdlclassifier_t *cf, xdfile_t *xdf1, xdfile_t *xd
for (i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart]; i <= xdf1->dend; i++, recs++) {
rcrec = cf->rcrecs[(*recs)->ha];
nm = rcrec ? rcrec->len2 : 0;
- dis1[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
+ dis1[i] = (nm == 0) ? 0: (nm >= mlim && !need_min) ? 2: 1;
}
if ((mlim = xdl_bogosqrt(xdf2->nrec)) > XDL_MAX_EQLIMIT)
@@ -387,7 +388,7 @@ static int xdl_cleanup_records(xdlclassifier_t *cf, xdfile_t *xdf1, xdfile_t *xd
for (i = xdf2->dstart, recs = &xdf2->recs[xdf2->dstart]; i <= xdf2->dend; i++, recs++) {
rcrec = cf->rcrecs[(*recs)->ha];
nm = rcrec ? rcrec->len1 : 0;
- dis2[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
+ dis2[i] = (nm == 0) ? 0: (nm >= mlim && !need_min) ? 2: 1;
}
for (nreff = 0, i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart];